[ 598.337944] env[69927]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69927) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 598.338362] env[69927]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69927) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 598.338519] env[69927]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69927) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 598.338922] env[69927]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 598.451895] env[69927]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69927) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 598.461745] env[69927]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69927) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 598.506802] env[69927]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 599.070690] env[69927]: INFO nova.virt.driver [None req-220bae7c-9940-4160-b823-6f2d4c16cb62 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 599.142903] env[69927]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 599.143075] env[69927]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 599.143173] env[69927]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69927) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 602.090075] env[69927]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-dab1a89f-28b5-41e7-aaa0-97d408085f26 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.107984] env[69927]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69927) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 602.108164] env[69927]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-d2757c60-ae90-4b07-a305-426f9608c48d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.134867] env[69927]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 9e6f9.
[ 602.135060] env[69927]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.992s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 602.135627] env[69927]: INFO nova.virt.vmwareapi.driver [None req-220bae7c-9940-4160-b823-6f2d4c16cb62 None None] VMware vCenter version: 7.0.3
[ 602.139080] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f202ac-35ab-49f4-a29d-0c2d2a6e679d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.157427] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e5f2ef-249f-4c2a-86c7-d9464b702b68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.164480] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c468d4e-6099-4fbb-800b-7ad9db0f2e0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.171392] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89be8dd8-9de6-498c-b2c5-ccbc6eee3fbf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.185800] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baeb0a6-f78f-4b48-ade0-f27ee861af2a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.192325] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96430ca-ec53-4fbd-9a5a-6a4c2708b548 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.223229] env[69927]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-78c3b800-66b8-460d-bc6f-fec0365e9485 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.229118] env[69927]: DEBUG nova.virt.vmwareapi.driver [None req-220bae7c-9940-4160-b823-6f2d4c16cb62 None None] Extension org.openstack.compute already exists. {{(pid=69927) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 602.231895] env[69927]: INFO nova.compute.provider_config [None req-220bae7c-9940-4160-b823-6f2d4c16cb62 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 602.735181] env[69927]: DEBUG nova.context [None req-220bae7c-9940-4160-b823-6f2d4c16cb62 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),cf67d028-25b0-4af1-93f1-aeda0e2e3921(cell1) {{(pid=69927) load_cells /opt/stack/nova/nova/context.py:464}}
[ 602.737413] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 602.737663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 602.738412] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 602.738936] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Acquiring lock "cf67d028-25b0-4af1-93f1-aeda0e2e3921" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 602.739075] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Lock "cf67d028-25b0-4af1-93f1-aeda0e2e3921" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 602.740110] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Lock "cf67d028-25b0-4af1-93f1-aeda0e2e3921" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 602.761304] env[69927]: INFO dbcounter [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Registered counter for database nova_cell0
[ 602.769929] env[69927]: INFO dbcounter [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Registered counter for database nova_cell1
[ 602.773502] env[69927]: DEBUG oslo_db.sqlalchemy.engines [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69927) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 602.774143] env[69927]: DEBUG oslo_db.sqlalchemy.engines [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69927) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 602.779240] env[69927]: ERROR nova.db.main.api [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 602.779240] env[69927]: result = function(*args, **kwargs)
[ 602.779240] env[69927]: File "/opt/stack/nova/nova/utils.py", line 672, in context_wrapper
[ 602.779240] env[69927]: return func(*args, **kwargs)
[ 602.779240] env[69927]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 602.779240] env[69927]: result = fn(*args, **kwargs)
[ 602.779240] env[69927]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 602.779240] env[69927]: return f(*args, **kwargs)
[ 602.779240] env[69927]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 602.779240] env[69927]: return db.service_get_minimum_version(context, binaries)
[ 602.779240] env[69927]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 602.779240] env[69927]: _check_db_access()
[ 602.779240] env[69927]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 602.779240] env[69927]: stacktrace = ''.join(traceback.format_stack())
[ 602.779240] env[69927]:
[ 602.780346] env[69927]: ERROR nova.db.main.api [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 602.780346] env[69927]: result = function(*args, **kwargs)
[ 602.780346] env[69927]: File "/opt/stack/nova/nova/utils.py", line 672, in context_wrapper
[ 602.780346] env[69927]: return func(*args, **kwargs)
[ 602.780346] env[69927]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 602.780346] env[69927]: result = fn(*args, **kwargs)
[ 602.780346] env[69927]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 602.780346] env[69927]: return f(*args, **kwargs)
[ 602.780346] env[69927]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 602.780346] env[69927]: return db.service_get_minimum_version(context, binaries)
[ 602.780346] env[69927]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 602.780346] env[69927]: _check_db_access()
[ 602.780346] env[69927]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 602.780346] env[69927]: stacktrace = ''.join(traceback.format_stack())
[ 602.780346] env[69927]:
[ 602.780781] env[69927]: WARNING nova.objects.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Failed to get minimum service version for cell cf67d028-25b0-4af1-93f1-aeda0e2e3921
[ 602.780894] env[69927]: WARNING nova.objects.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 602.781357] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Acquiring lock "singleton_lock" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 602.781523] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Acquired lock "singleton_lock" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[
602.781766] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Releasing lock "singleton_lock" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.782122] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Full set of CONF: {{(pid=69927) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 602.782270] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ******************************************************************************** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 602.782403] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] Configuration options gathered from: {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 602.782565] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 602.782767] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 602.782897] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ================================================================================ {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 602.783123] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] allow_resize_to_same_host = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.783301] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] arq_binding_timeout = 300 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.783448] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] backdoor_port = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.783582] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] backdoor_socket = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.783749] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] block_device_allocate_retries = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.783913] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] block_device_allocate_retries_interval = 3 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.784105] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cert = self.pem {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.784279] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.784449] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute_monitors = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.784620] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] config_dir = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.784866] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] config_drive_format = iso9660 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.785029] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.785216] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] config_source = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.785391] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] console_host = devstack {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.785563] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] control_exchange = nova {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.785727] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cpu_allocation_ratio = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.785890] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] daemon = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.786068] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] debug = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.786232] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] default_access_ip_network_name = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.786399] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] default_availability_zone = nova {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.786558] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] default_ephemeral_format = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.786720] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] default_green_pool_size = 1000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.786956] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.787140] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] default_schedule_zone = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.787301] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] disk_allocation_ratio = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.787462] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] enable_new_services = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.787642] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] enabled_apis = ['osapi_compute'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.787806] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] enabled_ssl_apis = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.787968] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] flat_injected = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.788145] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] force_config_drive = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.788308] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] force_raw_images = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.788482] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] graceful_shutdown_timeout = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.788648] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] heal_instance_info_cache_interval = -1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.788877] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] host = cpu-1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.789077] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.789251] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] initial_disk_allocation_ratio = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.789413] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] initial_ram_allocation_ratio = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.789636] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.789804] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instance_build_timeout = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.789967] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instance_delete_interval = 300 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.790153] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instance_format = [instance: %(uuid)s] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.790325] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instance_name_template = instance-%08x {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.790487] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instance_usage_audit = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.790660] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instance_usage_audit_period = month {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.790827] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.790994] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] instances_path = /opt/stack/data/nova/instances {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.791176] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] internal_service_availability_zone = internal {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.791361] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] key = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.791548] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] live_migration_retry_count = 30 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.791726] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_color = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.791893] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_config_append = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.792077] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.792244] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_dir = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.792431] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_file = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.792569] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_options = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.792731] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_rotate_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.792904] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_rotate_interval_type = days {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.793088] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] log_rotation_type = none {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.793224] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.793351] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.793526] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.793693] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.793822] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.793990] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] long_rpc_timeout = 1800 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.794167] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] max_concurrent_builds = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.794376] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] max_concurrent_live_migrations = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.794580] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] max_concurrent_snapshots = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.794752] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] max_local_block_devices = 3 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.794994] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] max_logfile_count = 30 {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.795088] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] max_logfile_size_mb = 200 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.795254] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] maximum_instance_delete_attempts = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.795426] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] metadata_listen = 0.0.0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.795601] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] metadata_listen_port = 8775 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.795775] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] metadata_workers = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.795938] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] migrate_max_retries = -1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.796129] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] mkisofs_cmd = genisoimage {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.796346] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] my_block_storage_ip = 10.180.1.21 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.796484] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] my_ip = 10.180.1.21 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.796693] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.796857] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] network_allocate_retries = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.797051] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.797227] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] osapi_compute_listen = 0.0.0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.797392] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] osapi_compute_listen_port = 8774 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.797561] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] osapi_compute_unique_server_name_scope = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.797735] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] osapi_compute_workers = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.797900] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] password_length = 12 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.798073] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] periodic_enable = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.798237] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] periodic_fuzzy_delay = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.798408] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] pointer_model = usbtablet {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.798579] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] preallocate_images = none {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.798741] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] publish_errors = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.798872] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] pybasedir = /opt/stack/nova {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.799043] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ram_allocation_ratio = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.799210] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] rate_limit_burst = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.799378] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] rate_limit_except_level = CRITICAL {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.799539] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] rate_limit_interval = 0 {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.799702] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] reboot_timeout = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.799861] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] reclaim_instance_interval = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.800034] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] record = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.800210] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] reimage_timeout_per_gb = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.800382] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] report_interval = 120 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.800544] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] rescue_timeout = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.800705] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] reserved_host_cpus = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.800866] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] reserved_host_disk_mb = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.801033] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] reserved_host_memory_mb = 512 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.801200] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] reserved_huge_pages = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.801363] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] resize_confirm_window = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.801523] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] resize_fs_using_block_device = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.801686] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] resume_guests_state_on_host_boot = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.801856] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.802030] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] rpc_response_timeout = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.802196] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] run_external_periodic_tasks = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.802364] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] running_deleted_instance_action = reap {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.802561] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] running_deleted_instance_poll_interval = 1800 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.802792] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] running_deleted_instance_timeout = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.802901] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler_instance_sync_interval = 120 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.803082] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_down_time = 720 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.803256] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] servicegroup_driver = db {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.803424] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] shell_completion = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.803606] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] shelved_offload_time = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.803767] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] shelved_poll_interval = 3600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.803935] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] shutdown_timeout = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.804109] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] source_is_ipv6 = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.804271] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ssl_only = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.804546] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.804725] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] sync_power_state_interval = 600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.804921] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] sync_power_state_pool_size = 1000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.805130] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] syslog_log_facility = LOG_USER {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.805279] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] tempdir = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.805443] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] timeout_nbd = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.805617] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] transport_url = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.805782] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] update_resources_interval = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.805944] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] use_cow_images = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.806121] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] use_journal = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.806283] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] use_json = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.806443] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] use_rootwrap_daemon = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.806604] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] 
use_stderr = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.806764] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] use_syslog = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.806922] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vcpu_pin_set = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.807104] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plugging_is_fatal = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.807277] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plugging_timeout = 300 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.807444] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] virt_mkfs = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.807609] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] volume_usage_poll_interval = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.807775] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] watch_log_file = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.807945] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] web = /usr/share/spice-html5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 602.808151] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 602.808322] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 602.808490] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 602.808668] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_concurrency.disable_process_locking = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.274631] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.274995] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.275115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.275265] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.275442] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.275612] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.275798] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.auth_strategy = keystone {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.275972] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.compute_link_prefix = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.276168] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.276346] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.dhcp_domain = novalocal {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.276517] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.enable_instance_password = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.276688] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.glance_link_prefix = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.276851] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.277033] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.277203] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.instance_list_per_project_cells = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.277366] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.list_records_by_skipping_down_cells = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.277528] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.local_metadata_per_cell = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.277703] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.max_limit = 1000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.277870] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.metadata_cache_expiration = 15 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.278055] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.neutron_default_tenant_id = default {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.278234] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.response_validation = warn {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.278407] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.use_neutron_default_nets = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.278576] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.278738] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.278903] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.279091] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.279271] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.vendordata_dynamic_targets = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.279438] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.vendordata_jsonfile_path = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.279626] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.279826] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.backend = dogpile.cache.memcached {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.279999] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.backend_argument = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.280178] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.backend_expiration_time = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.280379] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.config_prefix = cache.oslo {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.280575] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.dead_timeout = 60.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.280743] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.debug_cache_backend = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.280908] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.enable_retry_client = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.281086] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.enable_socket_keepalive = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.281268] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.enabled = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.281436] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.enforce_fips_mode = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.281606] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.expiration_time = 600 
{{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.281771] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.hashclient_retry_attempts = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.281939] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.hashclient_retry_delay = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.282120] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_dead_retry = 300 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.282286] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_password = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.282473] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.282656] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.282821] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_pool_maxsize = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.282987] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.283173] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_sasl_enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.283359] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.283554] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_socket_timeout = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.283723] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.memcache_username = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.283888] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.proxies = [] {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.284062] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.redis_db = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.284228] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.redis_password = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.284400] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.redis_sentinel_service_name = mymaster {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.284576] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.284747] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.redis_server = localhost:6379 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.284910] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.redis_socket_timeout = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.285084] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.redis_username = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.285252] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.retry_attempts = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.285418] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.retry_delay = 0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.285585] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.socket_keepalive_count = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.285742] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.socket_keepalive_idle = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.285900] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.socket_keepalive_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.286069] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.tls_allowed_ciphers = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.286233] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.tls_cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.286391] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.tls_certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.286552] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.tls_enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.286711] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cache.tls_keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.286880] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.287066] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.auth_type = password {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.287229] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.287411] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.catalog_info = volumev3::publicURL {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.287573] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.287737] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.287903] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.cross_az_attach = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.288080] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.debug = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.288244] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.endpoint_template = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.288407] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.http_retries = 3 {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.288569] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.288726] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.288966] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.os_region_name = RegionOne {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.289171] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.289336] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cinder.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.289511] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.289673] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.cpu_dedicated_set = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.289831] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.cpu_shared_set = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.289996] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.image_type_exclude_list = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.290177] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.290340] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.max_concurrent_disk_ops = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.290510] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.max_disk_devices_to_attach = -1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.290666] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.290835] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.290998] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.resource_provider_association_refresh = 300 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.291173] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.291335] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.shutdown_retry_interval = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.291514] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.291693] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] conductor.workers = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.291871] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] console.allowed_origins = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.292044] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] console.ssl_ciphers = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.292220] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] console.ssl_minimum_version = default {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.292389] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] consoleauth.enforce_session_timeout = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.292594] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] consoleauth.token_ttl = 600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.292771] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.292935] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.certfile = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.293126] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.293293] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.connect_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.293476] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.293653] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.293819] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.293980] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.294161] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.max_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.294325] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.294486] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.294651] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.retriable_status_codes = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.294810] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.294980] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.service_type = accelerator {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.295159] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.295318] env[69927]: DEBUG oslo_service.backend.eventlet.service 
[None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.295480] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.295641] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.295824] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.295985] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] cyborg.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.296173] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.asyncio_connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.296336] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.asyncio_slave_connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.296506] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.backend = sqlalchemy {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.296679] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.296845] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.connection_debug = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.297025] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.connection_parameters = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.297195] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.connection_recycle_time = 3600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.297358] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.connection_trace = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.297521] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.db_inc_retry_interval = 
True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.297685] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.db_max_retries = 20 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.297846] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.db_max_retry_interval = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.298017] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.db_retry_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.298187] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.max_overflow = 50 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.298351] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.max_pool_size = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.298514] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.max_retries = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.298687] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.298846] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.mysql_wsrep_sync_wait = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.299008] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.pool_timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.299221] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.retry_interval = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.299341] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.slave_connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.299501] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.sqlite_synchronous = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.299668] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] database.use_db_reconnect = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
603.299835] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.asyncio_connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.299994] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.asyncio_slave_connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.300180] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.backend = sqlalchemy {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.300375] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.300553] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.connection_debug = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.300729] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.connection_parameters = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.300894] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.connection_recycle_time = 3600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.301071] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.connection_trace = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.301241] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.db_inc_retry_interval = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.301409] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.db_max_retries = 20 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.301591] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.db_max_retry_interval = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.301735] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.db_retry_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.301896] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.max_overflow = 50 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.302074] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.max_pool_size = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.302240] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.max_retries = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.302435] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.302610] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.302777] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.pool_timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.302941] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.retry_interval = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.303114] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.slave_connection = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.303279] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] api_database.sqlite_synchronous = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.303472] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] devices.enabled_mdev_types = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.303666] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.303840] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ephemeral_storage_encryption.default_format = luks {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.304009] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ephemeral_storage_encryption.enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.304186] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.304360] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.api_servers = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.304528] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.304695] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.304857] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.305027] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.connect_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.305193] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.305356] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.debug = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.305531] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.default_trusted_certificate_ids = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.305687] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.enable_certificate_validation = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.305849] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.enable_rbd_download = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.306014] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.306187] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.306349] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.306508] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.max_version = None {{(pid=69927) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.306665] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.306826] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.num_retries = 3 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.306995] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.rbd_ceph_conf = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.307173] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.rbd_connect_timeout = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.307343] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.rbd_pool = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.307511] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.rbd_user = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.307683] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.307845] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.retriable_status_codes = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.308011] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.308190] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.service_type = image {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.308352] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.308513] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.308671] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.308829] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.309027] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.309188] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.verify_glance_signatures = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.309349] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] glance.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.309516] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] guestfs.debug = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.309686] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.309851] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.auth_type = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.310018] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.310183] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.310376] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.310558] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.connect_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.310723] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.310887] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.311065] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.insecure = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.311230] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.311392] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.max_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.311556] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.311741] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.311907] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.retriable_status_codes = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.312079] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.312258] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.service_type = shared-file-system {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.312437] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.share_apply_policy_timeout = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.312620] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.312784] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.312944] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.313118] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.313305] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.313491] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] manila.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.313675] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] mks.enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.314062] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.314262] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] image_cache.manager_interval = 2400 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.314436] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] image_cache.precache_concurrency = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.314610] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] image_cache.remove_unused_base_images = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.314780] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.314949] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.315142] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] image_cache.subdirectory_name = _base {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.315320] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.api_max_retries = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.315488] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.api_retry_interval = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.315652] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.315817] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.auth_type = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.315976] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.316150] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.316316] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.316480] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.conductor_group = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.316639] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.connect_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.316799] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.316959] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.317137] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.317297] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.317457] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.max_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.317621] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.317801] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.peer_list = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.317946] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.318118] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.retriable_status_codes = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.318284] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.serial_console_state_timeout = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.318446] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.318619] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.service_type = baremetal {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.318779] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.shard = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.318941] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.319113] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.319275] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.319432] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.319624] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.319798] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ironic.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.319985] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.320177] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] key_manager.fixed_key = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.320385] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.320562] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.barbican_api_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.320726] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.barbican_endpoint = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.320901] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.barbican_endpoint_type = public {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.321077] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.barbican_region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.321245] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.321405] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.321572] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.321735] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.321908] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.322064] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.number_of_retries = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.322227] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.retry_delay = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.322389] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.send_service_user_token = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.322582] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.322748] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.322914] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.verify_ssl = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.323090] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican.verify_ssl_path = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.323263] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.323444] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.auth_type = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.323623] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.323785] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.323950] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.324129] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.324290] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.324456] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.324619] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] barbican_service_user.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.324787] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.approle_role_id = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.324948] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.approle_secret_id = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.325133] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.kv_mountpoint = secret {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.325299] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.kv_path = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.325465] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.kv_version = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.325628] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.namespace = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.325803] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.root_token_id = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.325977] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.ssl_ca_crt_file = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.326165] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.timeout = 60.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.326332] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.use_ssl = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.326506] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.326678] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.326840] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.327009] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.327180] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.connect_retries = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.327339] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.327498] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.327662] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.327821] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.327979] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.max_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.328151] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.328318] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.328483] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.retriable_status_codes = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.328645] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.328816] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.service_type = identity {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.328982] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.329156] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.329319] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.329512] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.329658] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.329824] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] keystone.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.330033] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.ceph_mount_options = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.330384] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.330594] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.connection_uri = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.330765] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.cpu_mode = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.330936] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.cpu_model_extra_flags = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.331126] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.cpu_models = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.331304] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.cpu_power_governor_high = performance {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.331477] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.cpu_power_governor_low = powersave {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.331650] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.cpu_power_management = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.331812] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.332029] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.device_detach_attempts = 8 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.332164] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.device_detach_timeout = 20 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.332334] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.disk_cachemodes = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.332523] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.disk_prefix = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.332701] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.enabled_perf_events = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.332868] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.file_backed_memory = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.333048] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.gid_maps = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.333216] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.hw_disk_discard = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.333380] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.hw_machine_type = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.333581] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.images_rbd_ceph_conf = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.333754] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.333915] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.334097] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.images_rbd_glance_store_name = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.334274] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.images_rbd_pool = rbd 
{{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.334449] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.images_type = default {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.334616] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.images_volume_group = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.334782] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.inject_key = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.334949] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.inject_partition = -2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.335132] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.inject_password = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.335302] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.iscsi_iface = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.335468] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.iser_use_multipath = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.335636] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_bandwidth = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.335802] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.335967] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_downtime = 500 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.336148] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.336313] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.336479] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_inbound_addr = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.336653] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.336817] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_permit_post_copy = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.336979] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_scheme = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.337170] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_timeout_action = abort {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.337347] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_tunnelled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.337514] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_uri = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.337680] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.live_migration_with_native_tls = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.337842] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.max_queues = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.338024] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.338390] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.338570] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.nfs_mount_options = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.338886] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.339083] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69927) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.339262] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.num_iser_scan_tries = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.339429] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.num_memory_encrypted_guests = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.339622] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.339766] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.num_pcie_ports = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.339936] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.num_volume_scan_tries = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.340118] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.pmem_namespaces = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.340283] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.quobyte_client_cfg = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.340617] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.340803] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rbd_connect_timeout = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.340977] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.341161] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.341329] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rbd_secret_uuid = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.341494] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rbd_user = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.341664] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.341842] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.remote_filesystem_transport = ssh {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.342024] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rescue_image_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.342186] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rescue_kernel_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.342348] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rescue_ramdisk_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.342557] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.342726] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.rx_queue_size = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.342898] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.smbfs_mount_options = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.343219] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.343407] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.snapshot_compression = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.343600] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.snapshot_image_format = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.343831] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.344013] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.sparse_logical_volumes = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.344190] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.swtpm_enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.344370] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.swtpm_group = tss {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.344543] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.swtpm_user = tss {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.344759] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.sysinfo_serial = unique {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.344883] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.tb_cache_size = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.345057] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.tx_queue_size = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.345232] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.uid_maps = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.345397] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.use_virtio_for_bridges = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.345573] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.virt_type = kvm {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.345748] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.volume_clear = zero {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.345914] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.volume_clear_size = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.346099] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.volume_enforce_multipath = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.346271] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.volume_use_multipath = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.346434] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.vzstorage_cache_path = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.346610] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.346783] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.vzstorage_mount_group = qemu {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.346950] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.vzstorage_mount_opts = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.347139] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.347444] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.347633] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.vzstorage_mount_user = stack {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.347801] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.347978] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.348171] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.auth_type = password {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.348334] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.348494] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.348658] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.348814] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.connect_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.348971] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.349154] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.default_floating_pool = public {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.349316] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.349479] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.extension_sync_interval = 600 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.349645] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.http_retries = 3 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.349807] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.349964] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.350137] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.max_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.350320] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.350507] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.350683] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.ovs_bridge = br-int {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.350849] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.physnets = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.351028] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.region_name = RegionOne 
{{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.351192] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.retriable_status_codes = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.351361] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.service_metadata_proxy = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.351518] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.351687] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.service_type = network {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.351855] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.352008] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.352177] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.352336] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.352546] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.352717] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] neutron.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.352891] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] notifications.bdms_in_notifications = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.353084] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] notifications.default_level = INFO {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.353256] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] notifications.include_share_mapping = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.353458] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] notifications.notification_format = unversioned {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.353645] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] notifications.notify_on_state_change = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.353829] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.354020] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] pci.alias = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.354199] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] pci.device_spec = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.354368] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] pci.report_in_placement = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.354547] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.354724] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.auth_type = password {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.354895] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.355072] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.355236] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.355402] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.355563] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.connect_retries = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.355728] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.355887] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.default_domain_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.356060] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.default_domain_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.356226] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.domain_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.356387] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.domain_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.356548] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.356714] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.356872] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.357040] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.max_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.357202] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.357374] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.password = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.357534] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.project_domain_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.357705] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.project_domain_name = Default {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.357873] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.project_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.358059] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.project_name = service {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.358235] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.region_name = RegionOne {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.358398] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.retriable_status_codes = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.358563] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.358732] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.service_type = placement {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.358895] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.359069] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.359235] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.359396] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.system_scope = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.359556] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.359719] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.trust_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.359878] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.user_domain_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] 
placement.user_domain_name = Default {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.user_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.username = nova {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] placement.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.cores = 20 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361115] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.count_usage_from_placement = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361338] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361433] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.injected_file_content_bytes = 10240 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361598] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.injected_file_path_length = 255 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361763] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.injected_files = 5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.361956] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.instances = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.362109] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.key_pairs = 100 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.362280] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.metadata_items = 128 {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.362470] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.ram = 51200 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.362641] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.recheck_quota = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.362812] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.server_group_members = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.362979] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.server_groups = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.363207] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.363387] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] quota.unified_limits_resource_strategy = require {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.363593] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.363767] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.363933] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.image_metadata_prefilter = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.364111] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.364279] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.max_attempts = 3 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.364444] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.max_placement_results = 1000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.364612] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.364774] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.query_placement_for_image_type_support = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.364936] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.365125] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] scheduler.workers = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.365310] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.365484] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.365669] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.365845] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.366021] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.366189] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.366351] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.366541] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.366711] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.366876] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.367051] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.367221] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.367389] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.367568] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.367738] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.isolated_hosts = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.367902] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.isolated_images = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.368074] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.368240] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.368408] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.368573] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.pci_in_placement = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.368737] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.368900] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.369075] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.369241] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.369404] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.369570] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.369733] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.track_instance_changes = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.369931] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.370094] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] metrics.required = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.370266] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] metrics.weight_multiplier = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.370465] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.370643] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] metrics.weight_setting = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.370973] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.371168] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] serial_console.enabled = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.371351] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] serial_console.port_range = 10000:20000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.371527] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.371700] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.371873] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] serial_console.serialproxy_port = 6083 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.372058] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.372233] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.auth_type = password {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.372400] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.372590] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.372754] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.372919] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.373091] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.373268] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.send_service_user_token = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.373452] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.split_loggers = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.373629] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] service_user.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.373805] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.agent_enabled = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.373971] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.374304] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.374514] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.374692] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.html5proxy_port = 6082 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.374856] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.image_compression = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.375026] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.jpeg_compression = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.375191] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.playback_compression = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.375360] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.require_secure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.375531] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.server_listen = 127.0.0.1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.375704] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.375993] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.376179] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.streaming_mode = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.376343] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] spice.zlib_compression = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.376514] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] upgrade_levels.baseapi = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.376689] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] upgrade_levels.compute = auto {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.376850] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] upgrade_levels.conductor = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.377014] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] upgrade_levels.scheduler = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.377188] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.377352] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.auth_type = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.377514] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.377674] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.377837] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.377997] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.378173] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.keyfile = None {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.378336] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.378497] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vendordata_dynamic_auth.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.378673] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.api_retry_count = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.378835] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.ca_file = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.379023] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.cache_prefix = devstack-image-cache {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.379197] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.cluster_name = testcl1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.379365] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.connection_pool_size = 10 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.379527] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.console_delay_seconds = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.379697] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.datastore_regex = ^datastore.* {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.379913] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.380105] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.host_password = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.380279] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.host_port = 443 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.380476] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.host_username = administrator@vsphere.local {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.380651] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.insecure = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.380817] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.integration_bridge = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.380982] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.maximum_objects = 100 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.381159] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.pbm_default_policy = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.381322] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.pbm_enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.381483] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.pbm_wsdl_location = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.381656] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.381815] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.serial_port_proxy_uri = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.381974] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.serial_port_service_uri = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.382155] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.task_poll_interval = 0.5 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.382328] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.use_linked_clone = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.382527] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.vnc_keymap = en-us {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.382707] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.vnc_port = 5900 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.382869] env[69927]: DEBUG 
oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vmware.vnc_port_total = 10000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.383071] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.auth_schemes = ['none'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.383254] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.383575] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.383773] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.383949] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.novncproxy_port = 6080 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.384156] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.server_listen = 127.0.0.1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.384344] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.384509] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.vencrypt_ca_certs = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.384672] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.vencrypt_client_cert = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.384832] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vnc.vencrypt_client_key = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.385013] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.385190] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.disable_deep_image_inspection = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.385355] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.385520] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.385679] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.385840] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.disable_rootwrap = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.386008] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.enable_numa_live_migration = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.386180] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.386342] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.386504] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.386667] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.libvirt_disable_apic = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.386826] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.386990] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.387168] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.387330] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69927) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.387494] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.387656] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.387815] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.387974] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.388149] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.388315] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.388500] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.388669] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] wsgi.secure_proxy_ssl_header = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.388833] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] zvm.ca_file = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.388994] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] zvm.cloud_connector_url = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.389488] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.389681] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] zvm.reachable_timeout = 300 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.389864] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.390063] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.390252] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.connection_string = messaging:// {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.390491] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.enabled = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.390696] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.es_doc_type = notification {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.390868] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.es_scroll_size = 10000 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.391054] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.es_scroll_time = 2m {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.391227] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.filter_error_trace = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.391397] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.hmac_keys = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.391571] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.sentinel_service_name = mymaster {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.391741] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.socket_timeout = 0.1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.391906] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.trace_requests = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.392081] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler.trace_sqlalchemy = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.392269] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler_jaeger.process_tags = {} 
{{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.392461] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler_jaeger.service_name_prefix = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.392639] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] profiler_otlp.service_name_prefix = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.392819] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.392983] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.393161] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.393323] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.393516] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.393689] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.393854] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.394029] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.394204] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.394375] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.394559] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.394770] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.394943] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.395129] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.395298] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.395473] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.395640] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.395886] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.396089] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.396262] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.396429] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.396596] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.396762] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69927) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.396926] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.397109] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.397276] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.397438] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.397602] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.397764] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.397928] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.398101] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.ssl = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.398274] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.398446] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.398612] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.398784] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.398952] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.ssl_version = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.399132] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.399326] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.399497] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_notifications.retry = -1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.399681] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.399856] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_messaging_notifications.transport_url = **** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.400046] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.auth_section = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.400231] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.auth_type = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.400398] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.cafile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.400592] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.certfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.400764] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.collect_timing = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.400927] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.connect_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.401103] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.connect_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.401270] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.endpoint_id = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.401448] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.endpoint_interface = publicURL {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.401613] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.endpoint_override = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.401776] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.endpoint_region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.401936] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.endpoint_service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.402110] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.endpoint_service_type = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.402278] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.insecure = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.402458] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.keyfile = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.402635] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.max_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.402797] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.min_version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.402959] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.region_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.403138] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.retriable_status_codes = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.403302] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.service_name = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.403490] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.service_type = None 
{{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.403672] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.split_loggers = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.403841] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.status_code_retries = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.404013] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.status_code_retry_delay = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.404183] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.timeout = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.404345] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.valid_interfaces = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.404505] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_limit.version = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.404676] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_reports.file_event_handler = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.404846] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.405014] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] oslo_reports.log_dir = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.405201] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.405366] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.405528] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.405701] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.405866] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.406040] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.406218] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.406384] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_ovs_privileged.group = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.406546] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.406757] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.406931] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.407109] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] vif_plug_ovs_privileged.user = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.407286] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.flat_interface = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.407474] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.407654] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.407837] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.408020] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.408195] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.408368] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.408535] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.408719] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.408894] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_ovs.isolate_vif = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.409077] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.409250] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.409424] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.409598] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_ovs.ovsdb_interface = native {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.409761] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] os_vif_ovs.per_port_bridge = False {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.409933] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] privsep_osbrick.capabilities = [21] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.410109] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] privsep_osbrick.group = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.410322] env[69927]: DEBUG oslo_service.backend.eventlet.service [None 
req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] privsep_osbrick.helper_command = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.410484] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.410666] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.410828] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] privsep_osbrick.user = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.411016] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.411181] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] nova_sys_admin.group = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.411338] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] nova_sys_admin.helper_command = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.411503] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.411667] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.411825] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] nova_sys_admin.user = None {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 603.411958] env[69927]: DEBUG oslo_service.backend.eventlet.service [None req-0865b712-4fc6-46b0-af2d-fcdf47b92fc5 None None] ******************************************************************************** {{(pid=69927) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 603.412392] env[69927]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 603.916087] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Getting list of instances from cluster (obj){ [ 603.916087] env[69927]: value = "domain-c8" [ 603.916087] env[69927]: _type = "ClusterComputeResource" [ 603.916087] env[69927]: } {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 603.917215] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e0dac737-7be7-460e-8945-e8c9797bbe13 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.926436] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Got total of 0 instances {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 603.927055] env[69927]: WARNING nova.virt.vmwareapi.driver [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 603.927542] env[69927]: INFO nova.virt.node [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Generated node identity 2f529b36-df5f-4b37-8103-68f74f737726 [ 603.927778] env[69927]: INFO nova.virt.node [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Wrote node identity 2f529b36-df5f-4b37-8103-68f74f737726 to /opt/stack/data/n-cpu-1/compute_id [ 604.430879] env[69927]: WARNING nova.compute.manager [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Compute nodes ['2f529b36-df5f-4b37-8103-68f74f737726'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 605.436631] env[69927]: INFO nova.compute.manager [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 606.442729] env[69927]: WARNING nova.compute.manager [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 606.443083] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.443258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.443409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.443588] env[69927]: DEBUG nova.compute.resource_tracker [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 606.444537] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3e1668-2f0b-4720-b167-2e37ada30d69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.453374] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09471745-197a-410d-afa4-ce83868c2d91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.468312] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e65f75-b842-40e1-998f-a16537663280 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.475189] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666d5542-cadc-4f28-aa71-4be8132f01d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.505971] env[69927]: DEBUG nova.compute.resource_tracker [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180856MB free_disk=18GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 606.506142] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.506337] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.009096] env[69927]: WARNING 
nova.compute.resource_tracker [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] No compute node record for cpu-1:2f529b36-df5f-4b37-8103-68f74f737726: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 2f529b36-df5f-4b37-8103-68f74f737726 could not be found. [ 607.513836] env[69927]: INFO nova.compute.resource_tracker [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 2f529b36-df5f-4b37-8103-68f74f737726 [ 609.023391] env[69927]: DEBUG nova.compute.resource_tracker [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 609.023391] env[69927]: DEBUG nova.compute.resource_tracker [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 609.189286] env[69927]: INFO nova.scheduler.client.report [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] [req-c89504be-ce8f-4b62-b1f4-3413ed901f0a] Created resource provider record via placement API for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 609.209228] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f908305a-e6f6-4911-b719-bdb40e711e0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.216225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a13b3a2-a315-41fb-856e-b9e631bd0472 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.248164] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d95d985-98dc-44eb-a8b7-1a1fcabc4fac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.257218] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442bfaaf-0789-4a90-96a1-8f9f1377eec4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.273479] env[69927]: DEBUG nova.compute.provider_tree [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.808380] env[69927]: DEBUG nova.scheduler.client.report [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 609.808609] env[69927]: DEBUG nova.compute.provider_tree [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 0 to 1 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 609.808750] env[69927]: DEBUG nova.compute.provider_tree [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.866163] env[69927]: DEBUG nova.compute.provider_tree [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 1 to 2 during operation: update_traits {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 610.370991] env[69927]: DEBUG nova.compute.resource_tracker [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 610.371264] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.865s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.371401] env[69927]: DEBUG nova.service [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Creating RPC server for service compute {{(pid=69927) start /opt/stack/nova/nova/service.py:177}} [ 610.384425] env[69927]: DEBUG nova.service [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] Join ServiceGroup membership for this service compute {{(pid=69927) start /opt/stack/nova/nova/service.py:194}} [ 610.384610] env[69927]: DEBUG nova.servicegroup.drivers.db [None req-83d37989-9b77-4d33-81d7-ba0efa252077 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69927) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 634.386704] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_power_states {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 634.890204] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Getting list of instances from cluster (obj){ [ 634.890204] env[69927]: value = 
"domain-c8" [ 634.890204] env[69927]: _type = "ClusterComputeResource" [ 634.890204] env[69927]: } {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 634.891494] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d96658-09ea-4b56-95a6-752200f1afd6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.899772] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Got total of 0 instances {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 634.899997] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 634.900310] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Getting list of instances from cluster (obj){ [ 634.900310] env[69927]: value = "domain-c8" [ 634.900310] env[69927]: _type = "ClusterComputeResource" [ 634.900310] env[69927]: } {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 634.901169] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4372fb89-3f79-421e-a9a3-8faa3b3e3d45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.908296] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Got total of 0 instances {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 651.631348] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "4ad26720-ed24-4963-9519-3345dbfeb9a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.631348] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "4ad26720-ed24-4963-9519-3345dbfeb9a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.775222] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "820c50b9-3c18-41bc-a000-22425b1dbb27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.775339] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.135428] env[69927]: DEBUG nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 652.278437] env[69927]: DEBUG nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 652.679508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.680026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.684204] env[69927]: INFO nova.compute.claims [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.812941] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.925428] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "053f6f00-a818-473b-a887-4ec45174c1d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.925636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "053f6f00-a818-473b-a887-4ec45174c1d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.429840] env[69927]: 
DEBUG nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.777464] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60bb29c-8780-4d1f-a045-6322ff194817 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.788216] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46755996-5e75-463b-811c-452f32188ec2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.835731] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8c07ba-8411-4176-bca3-f7b4de9bad5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.844950] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03295b30-9c4b-43e0-990d-ae34dd028f2b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.864386] env[69927]: DEBUG nova.compute.provider_tree [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.965427] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.371071] env[69927]: DEBUG nova.scheduler.client.report [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.873475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.880121] env[69927]: DEBUG nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca 
tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 654.880121] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.066s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.881908] env[69927]: INFO nova.compute.claims [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.389092] env[69927]: DEBUG nova.compute.utils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 655.395918] env[69927]: DEBUG nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 655.878034] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "c3c36508-96e1-4e75-931b-c7f0740b74e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.879535] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.897671] env[69927]: DEBUG nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.068738] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "2cdfda66-1d93-4960-a129-2788f10fa593" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.068988] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "2cdfda66-1d93-4960-a129-2788f10fa593" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.070687] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c35b63-41d8-4f30-9534-ec3a3eca7edf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.080707] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f426bb-1eed-4380-b133-f21b9e05bf0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.129210] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5f3895-a58c-47a8-ac40-6a5e2b0c3490 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.140607] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26085118-f9aa-4673-97db-4ccdb00f7e87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.160479] env[69927]: DEBUG nova.compute.provider_tree [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.384633] env[69927]: DEBUG nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.394885] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.395173] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.575295] env[69927]: DEBUG nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.665059] env[69927]: DEBUG nova.scheduler.client.report [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 656.710525] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "c45d2259-2a05-49d5-81eb-4c79ced83121" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.710525] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "c45d2259-2a05-49d5-81eb-4c79ced83121" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.901426] env[69927]: DEBUG nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.911830] env[69927]: DEBUG nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 656.923660] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.115166] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.173485] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.174669] env[69927]: DEBUG nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 657.178591] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.213s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.179657] env[69927]: INFO nova.compute.claims [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.214730] env[69927]: DEBUG nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Starting instance... 
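The "compute_resources" lock traffic above (waited 2.066s / 3.213s, held 2.294s) is the resource tracker serializing instance claims: every concurrent build must take the same host-wide lock before it may deduct CPU, RAM, and disk from tracked inventory, which is why the tempest builds queue behind one another for seconds at a time. A minimal sketch of that pattern, using oslo.concurrency as the log shows but with a toy tracker rather than Nova's real ResourceTracker:

from oslo_concurrency import lockutils

class ToyResourceTracker:
    """Illustrative only -- mimics the claim/lock pattern visible in the log."""

    def __init__(self, vcpus, memory_mb):
        self.free_vcpus = vcpus
        self.free_memory_mb = memory_mb

    @lockutils.synchronized("compute_resources")   # one host-wide lock, as logged
    def instance_claim(self, flavor):
        # Reject the claim if the host cannot fit the flavor, otherwise deduct.
        if flavor["vcpus"] > self.free_vcpus or flavor["memory_mb"] > self.free_memory_mb:
            raise RuntimeError("insufficient resources")
        self.free_vcpus -= flavor["vcpus"]
        self.free_memory_mb -= flavor["memory_mb"]
        return {"vcpus": flavor["vcpus"], "memory_mb": flavor["memory_mb"]}
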
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 657.433109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.689347] env[69927]: DEBUG nova.compute.utils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 657.689347] env[69927]: DEBUG nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 657.689347] env[69927]: DEBUG nova.network.neutron [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 657.729410] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.729612] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.730346] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.730346] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 657.730346] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.730346] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.730551] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.730687] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.731190] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.731360] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.731529] env[69927]: DEBUG nova.virt.hardware [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.732831] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07866b8e-5cd2-4e54-8e44-ff0a7a060cdd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.750145] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698732fb-7206-49ae-9eb1-ef592a2239b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.755874] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.771620] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2eeb41-cd87-4309-ab81-b4ec7ff969eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.799957] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 657.811115] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.812193] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b82b840-a0f7-4cc4-b894-a123849aae71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.829125] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Created folder: OpenStack in parent group-v4. [ 657.829125] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Creating folder: Project (94ab114dcf1348ab8c17d2e533e265d0). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.829359] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06cc55f3-0c62-4f5a-9326-965d13fe9d97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.851520] env[69927]: DEBUG nova.policy [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24f77f0ec78b470e859d17e2a008f612', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b98c310beb7448918cec1f5a3ebe61f9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 657.855145] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Created folder: Project (94ab114dcf1348ab8c17d2e533e265d0) in parent group-v811283. [ 657.855379] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Creating folder: Instances. Parent ref: group-v811284. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 657.855738] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4231651-f0ea-4775-84c6-9621c5176088 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.869996] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Created folder: Instances in parent group-v811284. [ 657.869996] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 657.871018] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 657.871301] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdd75e21-7f53-4ea6-8868-9e54091a5583 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.899033] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 657.899033] env[69927]: value = "task-4095333" [ 657.899033] env[69927]: _type = "Task" [ 657.899033] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.912978] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095333, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.198929] env[69927]: DEBUG nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 658.379441] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cddf3de-bdff-491d-aa2f-f4d824f87a8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.388753] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ed1ce8-b29e-4012-922c-09d2c8d726fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.433147] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8567e2-d5e5-4a0a-8a9f-71e1a16028cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.442595] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095333, 'name': CreateVM_Task, 'duration_secs': 0.361888} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.445491] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 658.446739] env[69927]: DEBUG oslo_vmware.service [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a22c218-ac9c-4bfd-be21-c9c3018144a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.457812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.457990] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.459540] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 658.459540] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d43de14b-22eb-47cc-b811-1c038c2616ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.462162] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13382fde-a5b1-4900-aff7-f591aac3b7e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.470513] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 658.470513] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e658ff-fb80-0a2a-ed72-e8109b19177a" [ 658.470513] env[69927]: _type = "Task" [ 658.470513] env[69927]: } to complete. 
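Each "Invoking Folder.CreateVM_Task" call returns a task handle that the driver then polls ("progress is 0%" ... "completed successfully") until vCenter reports a terminal state. Stripped of the oslo.vmware specifics, the wait loop reduces to the sketch below; the state names and the get_task_info callback are simplified placeholders, not the library's API.

import time

def wait_for_task(get_task_info, interval=1.0, timeout=300):
    """Poll a task-info callback until it reports success, error, or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()        # e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)          # still queued or running: poll again
    raise TimeoutError("task did not complete within %ss" % timeout)
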
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.480943] env[69927]: DEBUG nova.compute.provider_tree [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.495796] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.495971] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 658.496219] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.496357] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.496768] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 658.497755] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d40e87f1-47c3-496f-8d2a-ecd91fd90ba8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.515553] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.517147] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.517764] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca 
tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.517925] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 658.518189] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.519184] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dfb350-ffa8-4c5b-addb-440326766c45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.521681] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.521910] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.522524] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.522723] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.522889] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 658.523073] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.526913] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-797c78c1-c979-4bda-9dd4-7abc7ae75e3d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.533982] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 658.533982] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f844c0-62c4-a637-b057-95e880b8cccc" [ 658.533982] env[69927]: _type = "Task" [ 658.533982] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.551274] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Preparing fetch location {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 658.551323] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Creating directory with path [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 658.552107] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b23ccc3f-c98d-47a0-ac01-656b3f48302b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.584810] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Created directory with path [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.585058] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Fetch image to [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 658.585237] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Downloading image file data f524494e-9179-4b3e-a3e2-782f019def24 to [datastore1] 
vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk on the data store datastore1 {{(pid=69927) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 658.586515] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243318ed-a656-449b-926b-deae32f4ef2e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.594900] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514cbfe5-3d4e-44d8-a9be-f43e9cea0b68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.605867] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb908677-e288-45cc-a9ba-65c770e39b59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.646554] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744dbcb4-f39b-4740-80ef-5d6cdfc37714 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.655899] env[69927]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7f23e067-b930-46af-8273-5a3b531259bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.692697] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Downloading image file data f524494e-9179-4b3e-a3e2-782f019def24 to the data store datastore1 {{(pid=69927) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 658.769620] env[69927]: DEBUG oslo_vmware.rw_handles [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
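The rw_handles step above pushes the 21318656-byte sparse VMDK over HTTPS to the ESX host's /folder endpoint, with the datacenter path and datastore name passed as query parameters. A rough, simplified equivalent with requests is shown below; the real write handle also negotiates a session ticket and streams in chunks, and the cookie and TLS-verification handling here is assumed, not taken from the driver.

import requests

def upload_vmdk(url, image_file, session_cookie, verify=True):
    """Stream an already-open binary file to a datastore /folder URL."""
    headers = {
        "Content-Type": "application/octet-stream",
        "Cookie": session_cookie,   # assumed: auth cookie/ticket from the vCenter session
    }
    resp = requests.put(url, data=image_file, headers=headers, verify=verify)
    resp.raise_for_status()

# e.g. upload_vmdk("https://esx.../folder/....vmdk?dcPath=ha-datacenter&dsName=datastore1",
#                  open("cirros.vmdk", "rb"), cookie)
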
{{(pid=69927) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 658.990049] env[69927]: DEBUG nova.scheduler.client.report [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 659.028766] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.085154] env[69927]: DEBUG nova.network.neutron [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Successfully created port: ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.124626] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.125191] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.216256] env[69927]: DEBUG nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 659.255305] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 659.259282] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.259282] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 659.259282] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.259282] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 659.259282] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 659.259505] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 659.259505] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 659.259505] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 659.259505] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 659.259505] env[69927]: DEBUG nova.virt.hardware [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 659.259662] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708449ad-8252-4cfa-b117-73bae01bbcdc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.276959] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdca4cf-1f61-4c6c-b1b8-de4d5a908570 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.499025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.500132] env[69927]: DEBUG nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 659.505719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.582s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.507678] env[69927]: INFO nova.compute.claims [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.631674] env[69927]: DEBUG nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Starting instance... 
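The hardware.py lines above (flavor and image limits 0:0:0, limits of 65536, "Got 1 possible topologies") are the CPU-topology search: enumerate every sockets x cores x threads factorisation of the flavor's vCPU count that fits the limits, then pick a preferred one. A toy version of that enumeration follows; Nova's real code applies further preferences and sorting on top of it.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every (sockets, cores, threads) split whose product equals vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
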
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 659.672594] env[69927]: DEBUG oslo_vmware.rw_handles [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Completed reading data from the image iterator. {{(pid=69927) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 659.672840] env[69927]: DEBUG oslo_vmware.rw_handles [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 659.728245] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Downloaded image file data f524494e-9179-4b3e-a3e2-782f019def24 to vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk on the data store datastore1 {{(pid=69927) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 659.729365] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Caching image {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 659.729563] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Copying Virtual Disk [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk to [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 659.729854] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b278faf-1ccb-4560-a108-80f9bf4bfc7f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.739679] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 659.739679] env[69927]: value = "task-4095338" [ 659.739679] env[69927]: _type = "Task" [ 659.739679] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.748724] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095338, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.012249] env[69927]: DEBUG nova.compute.utils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 660.019169] env[69927]: DEBUG nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 660.019169] env[69927]: DEBUG nova.network.neutron [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.167914] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.228154] env[69927]: DEBUG nova.policy [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56f1cecf90004886bd7b3596ea39811e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0f8526f19884878a1ca5cb662729d7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 660.261885] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095338, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.519870] env[69927]: DEBUG nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 660.763307] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095338, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728048} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.763747] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Copied Virtual Disk [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk to [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 660.764124] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Deleting the datastore file [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 660.764488] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df0d7c6d-51e0-4fe4-b6bf-3eb5b6b80e9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.776915] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 660.776915] env[69927]: value = "task-4095339" [ 660.776915] env[69927]: _type = "Task" [ 660.776915] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.794025] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095339, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.818393] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ea1bbb-8929-4668-a340-b385a8ccace2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.830705] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3ba2b9-1722-4f59-ac81-ce634928f410 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.887755] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c38ec15-68e2-4338-bd7a-27bfd7ba8a22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.907559] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab44433e-5067-44d8-b440-b37c74c16b1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.927171] env[69927]: DEBUG nova.compute.provider_tree [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 661.293669] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095339, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032307} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.294188] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 661.294456] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Moving file from [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f/f524494e-9179-4b3e-a3e2-782f019def24 to [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24. 
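The HTTP download together with tasks 4095338-4095340 implements the datastore image cache: fetch the sparse VMDK into a throw-away vmware_temp directory, CopyVirtualDisk it to its final name, delete the sparse temp file, then move the directory into devstack-image-cache_base so the next boot of the same image skips the Glance download entirely. Schematically, with placeholder callbacks standing in for the real datastore operations:

from uuid import uuid4

def fetch_image_if_missing(ds, image_id, exists_cb, fetch_cb, copy_cb, delete_cb, move_cb):
    """Cache-on-first-use flow; the *_cb arguments stand in for datastore calls."""
    cached = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (ds, image_id, image_id)
    if exists_cb(cached):                                   # cache hit: reuse it
        return cached
    tmp = "[%s] vmware_temp/%s/%s" % (ds, uuid4(), image_id)
    fetch_cb(tmp + "/tmp-sparse.vmdk")                      # HTTP download from Glance
    copy_cb(tmp + "/tmp-sparse.vmdk", "%s/%s.vmdk" % (tmp, image_id))   # CopyVirtualDisk_Task
    delete_cb(tmp + "/tmp-sparse.vmdk")                     # DeleteDatastoreFile_Task
    move_cb(tmp, "[%s] devstack-image-cache_base/%s" % (ds, image_id))  # MoveDatastoreFile_Task
    return cached
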
{{(pid=69927) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 661.294769] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-5f196287-dc99-4ee3-ab62-98be8923d84b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.309147] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 661.309147] env[69927]: value = "task-4095340" [ 661.309147] env[69927]: _type = "Task" [ 661.309147] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.328520] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095340, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.461147] env[69927]: ERROR nova.scheduler.client.report [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [req-06c7d7b2-293f-4029-9a33-c85af4f952c2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-06c7d7b2-293f-4029-9a33-c85af4f952c2"}]} [ 661.493415] env[69927]: DEBUG nova.scheduler.client.report [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 661.515472] env[69927]: DEBUG nova.scheduler.client.report [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 661.515915] env[69927]: DEBUG nova.compute.provider_tree [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 18, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 661.532850] env[69927]: DEBUG nova.scheduler.client.report [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 661.540931] env[69927]: DEBUG nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 661.561947] env[69927]: DEBUG nova.scheduler.client.report [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 661.580776] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 661.581066] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 661.581281] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 661.581504] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 661.581688] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 661.581873] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 661.582433] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 661.583226] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 661.584220] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 661.584555] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 661.584642] env[69927]: DEBUG nova.virt.hardware [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 661.585878] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880661b9-94b9-4c73-bef6-600166e11b59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.605250] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce573efc-45ae-40d1-ab93-352d5eea2de8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.755518] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa91b953-12c6-4f7c-9d2f-97623a64e44f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.768023] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d751fedf-bbe4-48f7-bbd2-6c9429ffcb58 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.799960] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df11d6ce-39fe-4b62-baac-337ef425d1c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.812611] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044d7cf5-c0b6-46c1-9de3-fc47a97d12fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.832752] env[69927]: DEBUG nova.compute.provider_tree [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 661.838877] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095340, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.053384} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.838877] env[69927]: DEBUG nova.network.neutron [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Successfully created port: 8ed32df7-4147-4bf4-bedd-196b87bbea4d {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.841140] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] File moved {{(pid=69927) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 661.843020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Cleaning up location [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 661.843020] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Deleting the datastore file [datastore1] vmware_temp/4935446a-defd-4116-8120-a11fd36adb0f {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 661.843020] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce710b52-6eff-40d0-8ca7-d9e072c87870 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.851656] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 661.851656] env[69927]: value = "task-4095342" [ 661.851656] env[69927]: _type = "Task" [ 661.851656] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.867156] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095342, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.366927] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.047033} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.367385] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 662.368483] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aeeb790-7789-411b-a980-d40073f9f5c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.376510] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 662.376510] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524beab7-4b28-e19a-071c-5ab577ab4c04" [ 662.376510] env[69927]: _type = "Task" [ 662.376510] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.383673] env[69927]: DEBUG nova.scheduler.client.report [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 12 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 662.384607] env[69927]: DEBUG nova.compute.provider_tree [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 12 to 13 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 662.384607] env[69927]: DEBUG nova.compute.provider_tree [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.399830] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524beab7-4b28-e19a-071c-5ab577ab4c04, 'name': SearchDatastore_Task, 'duration_secs': 0.011632} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.399830] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.399830] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 4ad26720-ed24-4963-9519-3345dbfeb9a2/4ad26720-ed24-4963-9519-3345dbfeb9a2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 662.399830] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3564ed08-6cdf-410a-bed2-ea896342a0a4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.408881] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 662.408881] env[69927]: value = "task-4095343" [ 662.408881] env[69927]: _type = "Task" [ 662.408881] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.419516] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095343, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.805774] env[69927]: DEBUG nova.network.neutron [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Successfully updated port: ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.890299] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.385s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.890861] env[69927]: DEBUG nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 662.895284] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.780s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.896545] env[69927]: INFO nova.compute.claims [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.925034] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095343, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.310112] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "refresh_cache-820c50b9-3c18-41bc-a000-22425b1dbb27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.312203] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquired lock "refresh_cache-820c50b9-3c18-41bc-a000-22425b1dbb27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.312203] env[69927]: DEBUG nova.network.neutron [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.402767] env[69927]: DEBUG nova.compute.utils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.410020] env[69927]: DEBUG nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.410020] env[69927]: DEBUG nova.network.neutron [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.425075] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095343, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540164} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.425939] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 4ad26720-ed24-4963-9519-3345dbfeb9a2/4ad26720-ed24-4963-9519-3345dbfeb9a2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 663.425939] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 663.426220] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-211c64d4-24a5-417c-9e49-bdb45c935298 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.435560] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 663.435560] env[69927]: value = "task-4095344" [ 663.435560] env[69927]: _type = "Task" [ 663.435560] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.452581] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095344, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.548017] env[69927]: DEBUG nova.policy [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c30b7b75bed4b6e886abb072a729792', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf8f9d710b8d4390b0c0bc0c25c1aac6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 663.912020] env[69927]: DEBUG nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 663.959786] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095344, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080509} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.960615] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 663.962962] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5df1bb-2a7a-4987-9897-087430ffab58 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.000967] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 4ad26720-ed24-4963-9519-3345dbfeb9a2/4ad26720-ed24-4963-9519-3345dbfeb9a2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.002421] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db05602b-7c49-4512-abf8-bf7ec1791a9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.029318] env[69927]: DEBUG nova.network.neutron [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.040842] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 664.040842] env[69927]: value = "task-4095346" [ 664.040842] env[69927]: _type = "Task" [ 664.040842] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.051545] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095346, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.154957] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de423b9-8e00-4ab9-bb2e-1191db489a19 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.163883] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d329892-8e8f-47c1-954e-3c8c8181cc1c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.210746] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64ea9fe-69b4-48f1-9819-18fd59fee39e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.221969] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe1b4db-5049-480f-8fb6-e0128b93e9fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.247686] env[69927]: DEBUG nova.compute.provider_tree [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.361140] env[69927]: DEBUG nova.compute.manager [req-3ce5aa5d-d8a1-4109-9ddb-6434971bdd50 req-0c168aa8-8ab7-47e2-a1fd-f1e614a9aed4 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Received event network-vif-plugged-ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.361140] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ce5aa5d-d8a1-4109-9ddb-6434971bdd50 req-0c168aa8-8ab7-47e2-a1fd-f1e614a9aed4 service nova] Acquiring lock "820c50b9-3c18-41bc-a000-22425b1dbb27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.361140] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ce5aa5d-d8a1-4109-9ddb-6434971bdd50 req-0c168aa8-8ab7-47e2-a1fd-f1e614a9aed4 service nova] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.361140] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ce5aa5d-d8a1-4109-9ddb-6434971bdd50 req-0c168aa8-8ab7-47e2-a1fd-f1e614a9aed4 service nova] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.361421] env[69927]: DEBUG nova.compute.manager [req-3ce5aa5d-d8a1-4109-9ddb-6434971bdd50 req-0c168aa8-8ab7-47e2-a1fd-f1e614a9aed4 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] No waiting events found dispatching network-vif-plugged-ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 664.361557] env[69927]: WARNING 
nova.compute.manager [req-3ce5aa5d-d8a1-4109-9ddb-6434971bdd50 req-0c168aa8-8ab7-47e2-a1fd-f1e614a9aed4 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Received unexpected event network-vif-plugged-ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 for instance with vm_state building and task_state spawning. [ 664.558653] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095346, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.750589] env[69927]: DEBUG nova.scheduler.client.report [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.929937] env[69927]: DEBUG nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 664.968310] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 664.968761] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.969138] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 664.970074] env[69927]: DEBUG 
nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.970334] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 664.970640] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 664.971870] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 664.972509] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 664.972744] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 664.974109] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 664.974179] env[69927]: DEBUG nova.virt.hardware [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 664.975167] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7042581-6f14-438f-9a95-ebe1c29f851c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.985129] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc2da6a-a998-46ba-a94c-fe15abfaa2e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.054449] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095346, 'name': ReconfigVM_Task, 'duration_secs': 0.547374} completed 
successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.054640] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 4ad26720-ed24-4963-9519-3345dbfeb9a2/4ad26720-ed24-4963-9519-3345dbfeb9a2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.055362] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c5d7c2a-036e-4a2a-a9c8-13ab4ccc56d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.062983] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 665.062983] env[69927]: value = "task-4095347" [ 665.062983] env[69927]: _type = "Task" [ 665.062983] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.073487] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095347, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.144535] env[69927]: DEBUG nova.network.neutron [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Updating instance_info_cache with network_info: [{"id": "ea8b5988-95e7-4a5b-b062-03d4d72eb9e5", "address": "fa:16:3e:a6:9a:f6", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b5988-95", "ovs_interfaceid": "ea8b5988-95e7-4a5b-b062-03d4d72eb9e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.258513] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.258513] env[69927]: DEBUG nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 665.274603] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.833s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.279284] env[69927]: INFO nova.compute.claims [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.363453] env[69927]: DEBUG nova.network.neutron [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Successfully updated port: 8ed32df7-4147-4bf4-bedd-196b87bbea4d {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.572870] env[69927]: DEBUG nova.network.neutron [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Successfully created port: 4df42787-eead-48f3-a537-5f3f2a36a836 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.583985] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095347, 'name': Rename_Task, 'duration_secs': 0.139032} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.583985] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 665.584169] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62b2aae8-3963-4b6e-a20d-fbbcda841362 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.591837] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 665.591837] env[69927]: value = "task-4095348" [ 665.591837] env[69927]: _type = "Task" [ 665.591837] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.614060] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.647488] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Releasing lock "refresh_cache-820c50b9-3c18-41bc-a000-22425b1dbb27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.648193] env[69927]: DEBUG nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Instance network_info: |[{"id": "ea8b5988-95e7-4a5b-b062-03d4d72eb9e5", "address": "fa:16:3e:a6:9a:f6", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b5988-95", "ovs_interfaceid": "ea8b5988-95e7-4a5b-b062-03d4d72eb9e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 665.649769] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:9a:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea8b5988-95e7-4a5b-b062-03d4d72eb9e5', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 665.665091] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Creating folder: Project (b98c310beb7448918cec1f5a3ebe61f9). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 665.665509] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4397f40d-71e8-4b52-96f7-4097640a16b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.680377] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Created folder: Project (b98c310beb7448918cec1f5a3ebe61f9) in parent group-v811283. [ 665.680601] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Creating folder: Instances. Parent ref: group-v811290. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 665.681485] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c16c3871-350d-4c88-9e21-eb3045869357 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.692914] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Created folder: Instances in parent group-v811290. [ 665.693344] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 665.693580] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 665.693963] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cd03001-0ed1-428e-9f3f-d1e149807b0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.720525] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 665.720525] env[69927]: value = "task-4095352" [ 665.720525] env[69927]: _type = "Task" [ 665.720525] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.730195] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095352, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.782834] env[69927]: DEBUG nova.compute.utils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.784900] env[69927]: DEBUG nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 665.785139] env[69927]: DEBUG nova.network.neutron [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.868442] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "refresh_cache-053f6f00-a818-473b-a887-4ec45174c1d5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.868442] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired lock "refresh_cache-053f6f00-a818-473b-a887-4ec45174c1d5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.868442] env[69927]: DEBUG nova.network.neutron [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.057802] env[69927]: DEBUG nova.policy [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40ce8fc2a7ea40f6ad2751617a40198f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '731443ca96b749c1861bf82f6f36c238', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 666.104250] env[69927]: DEBUG oslo_vmware.api [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095348, 'name': PowerOnVM_Task, 'duration_secs': 0.497443} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.104544] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 666.104966] env[69927]: INFO nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Took 9.19 seconds to spawn the instance on the hypervisor. [ 666.105312] env[69927]: DEBUG nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 666.106172] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d56904-e9f6-4a34-bb6f-6493a7d21431 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.235749] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095352, 'name': CreateVM_Task, 'duration_secs': 0.405193} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.236034] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 666.266678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.266678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.266678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 666.267355] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e622a5a0-8f01-4176-a8c3-79980f029387 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.274692] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 
tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 666.274692] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d2925c-53f8-121f-16ec-55004e13bfce" [ 666.274692] env[69927]: _type = "Task" [ 666.274692] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.291328] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d2925c-53f8-121f-16ec-55004e13bfce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.293066] env[69927]: DEBUG nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 666.454269] env[69927]: DEBUG nova.network.neutron [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.480805] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e447e0-89a9-487f-bde7-34c6e2521834 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.491931] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efd5135-9a8a-47b4-a943-914c39128afe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.498821] env[69927]: DEBUG nova.compute.manager [req-276e640c-6eca-4ffb-9fdb-217c87238cef req-8dd786ca-20f7-441b-82be-379dc6e2acd7 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Received event network-vif-plugged-8ed32df7-4147-4bf4-bedd-196b87bbea4d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 666.499157] env[69927]: DEBUG oslo_concurrency.lockutils [req-276e640c-6eca-4ffb-9fdb-217c87238cef req-8dd786ca-20f7-441b-82be-379dc6e2acd7 service nova] Acquiring lock "053f6f00-a818-473b-a887-4ec45174c1d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.499267] env[69927]: DEBUG oslo_concurrency.lockutils [req-276e640c-6eca-4ffb-9fdb-217c87238cef req-8dd786ca-20f7-441b-82be-379dc6e2acd7 service nova] Lock "053f6f00-a818-473b-a887-4ec45174c1d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.499424] env[69927]: DEBUG oslo_concurrency.lockutils [req-276e640c-6eca-4ffb-9fdb-217c87238cef req-8dd786ca-20f7-441b-82be-379dc6e2acd7 service nova] Lock "053f6f00-a818-473b-a887-4ec45174c1d5-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.500450] env[69927]: DEBUG nova.compute.manager [req-276e640c-6eca-4ffb-9fdb-217c87238cef req-8dd786ca-20f7-441b-82be-379dc6e2acd7 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] No waiting events found dispatching network-vif-plugged-8ed32df7-4147-4bf4-bedd-196b87bbea4d {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 666.500450] env[69927]: WARNING nova.compute.manager [req-276e640c-6eca-4ffb-9fdb-217c87238cef req-8dd786ca-20f7-441b-82be-379dc6e2acd7 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Received unexpected event network-vif-plugged-8ed32df7-4147-4bf4-bedd-196b87bbea4d for instance with vm_state building and task_state spawning. [ 666.537735] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cacfac-a208-4f3f-9408-1d61c11a1d5a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.548659] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ce5930-0d5a-4c44-8632-5861d8ac9f8b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.568442] env[69927]: DEBUG nova.compute.provider_tree [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.632822] env[69927]: INFO nova.compute.manager [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Took 13.99 seconds to build instance. [ 666.794030] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d2925c-53f8-121f-16ec-55004e13bfce, 'name': SearchDatastore_Task, 'duration_secs': 0.046898} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.794030] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.794030] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 666.794030] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.794441] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.794441] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 666.794441] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6245eb6c-fe18-475c-b747-ae2c4ab8eb87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.807546] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 666.807678] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 666.808450] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe4edd86-cfc9-4e03-b803-de1e545fccd9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.815229] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 666.815229] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52822e26-d690-ac6f-19d8-23db8367cf3c" [ 666.815229] env[69927]: _type = "Task" [ 666.815229] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.825200] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52822e26-d690-ac6f-19d8-23db8367cf3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.952714] env[69927]: DEBUG nova.network.neutron [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Successfully created port: ee4c8bc2-33ea-425e-adbc-5a75ab428943 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.075778] env[69927]: DEBUG nova.scheduler.client.report [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.135363] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c3e4c29-4c1f-4284-983c-ec377fb696ca tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "4ad26720-ed24-4963-9519-3345dbfeb9a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.504s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.226953] env[69927]: DEBUG nova.compute.manager [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Received event network-changed-ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.226953] env[69927]: DEBUG nova.compute.manager [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Refreshing 
instance network info cache due to event network-changed-ea8b5988-95e7-4a5b-b062-03d4d72eb9e5. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 667.226953] env[69927]: DEBUG oslo_concurrency.lockutils [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] Acquiring lock "refresh_cache-820c50b9-3c18-41bc-a000-22425b1dbb27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.227355] env[69927]: DEBUG oslo_concurrency.lockutils [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] Acquired lock "refresh_cache-820c50b9-3c18-41bc-a000-22425b1dbb27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.227584] env[69927]: DEBUG nova.network.neutron [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Refreshing network info cache for port ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.309100] env[69927]: DEBUG nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 667.323447] env[69927]: DEBUG nova.network.neutron [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Updating instance_info_cache with network_info: [{"id": "8ed32df7-4147-4bf4-bedd-196b87bbea4d", "address": "fa:16:3e:2a:14:ed", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed32df7-41", "ovs_interfaceid": "8ed32df7-4147-4bf4-bedd-196b87bbea4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.331674] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52822e26-d690-ac6f-19d8-23db8367cf3c, 'name': SearchDatastore_Task, 'duration_secs': 0.03837} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.334954] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-215a1651-d529-4cac-9b20-e7e995168c83 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.341739] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.341984] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.342154] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.342333] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.342479] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.342689] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.342829] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.343103] env[69927]: DEBUG nova.virt.hardware [None 
req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.343587] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.343587] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.343587] env[69927]: DEBUG nova.virt.hardware [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.345197] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936b4644-c9ab-4337-b709-5433accff99d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.349850] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 667.349850] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52556218-a9a6-97ad-27e7-9af0a8b99df7" [ 667.349850] env[69927]: _type = "Task" [ 667.349850] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.357138] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4520745a-6980-4d54-968a-e9ee0c8bd219 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.366228] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52556218-a9a6-97ad-27e7-9af0a8b99df7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.587285] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.589714] env[69927]: DEBUG nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 667.591900] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.836s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.596827] env[69927]: INFO nova.compute.claims [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.837607] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Releasing lock "refresh_cache-053f6f00-a818-473b-a887-4ec45174c1d5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.837607] env[69927]: DEBUG nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Instance network_info: |[{"id": "8ed32df7-4147-4bf4-bedd-196b87bbea4d", "address": "fa:16:3e:2a:14:ed", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed32df7-41", "ovs_interfaceid": "8ed32df7-4147-4bf4-bedd-196b87bbea4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.838597] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:14:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ed32df7-4147-4bf4-bedd-196b87bbea4d', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.858216] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 
tempest-DeleteServersAdminTestJSON-1107524716-project-member] Creating folder: Project (b0f8526f19884878a1ca5cb662729d7a). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.858216] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4dafd83d-b8d6-45db-99c6-044893acf059 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.872025] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52556218-a9a6-97ad-27e7-9af0a8b99df7, 'name': SearchDatastore_Task, 'duration_secs': 0.017992} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.872326] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.872677] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 820c50b9-3c18-41bc-a000-22425b1dbb27/820c50b9-3c18-41bc-a000-22425b1dbb27.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 667.873142] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3601fda-3d5b-4675-9c62-26f5ffe1247a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.877717] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Created folder: Project (b0f8526f19884878a1ca5cb662729d7a) in parent group-v811283. [ 667.877717] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Creating folder: Instances. Parent ref: group-v811293. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.877980] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddb9807e-bb76-421b-9e66-8f2a1355c53e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.882416] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 667.882416] env[69927]: value = "task-4095355" [ 667.882416] env[69927]: _type = "Task" [ 667.882416] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.892286] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.894078] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Created folder: Instances in parent group-v811293. [ 667.894478] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.894541] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.895547] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b3c6620-5d51-4df6-8e39-7d4d8331fc28 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.917259] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.917259] env[69927]: value = "task-4095357" [ 667.917259] env[69927]: _type = "Task" [ 667.917259] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.927352] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095357, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.106131] env[69927]: DEBUG nova.compute.utils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.115957] env[69927]: DEBUG nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 668.115957] env[69927]: DEBUG nova.network.neutron [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.287804] env[69927]: DEBUG nova.policy [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b66d74a85f3d4d31a4efce8a8df01cc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be278be46f3d48df818c834df17c663f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 668.401538] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095355, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.420014] env[69927]: DEBUG nova.network.neutron [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Updated VIF entry in instance network info cache for port ea8b5988-95e7-4a5b-b062-03d4d72eb9e5. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 668.421071] env[69927]: DEBUG nova.network.neutron [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Updating instance_info_cache with network_info: [{"id": "ea8b5988-95e7-4a5b-b062-03d4d72eb9e5", "address": "fa:16:3e:a6:9a:f6", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b5988-95", "ovs_interfaceid": "ea8b5988-95e7-4a5b-b062-03d4d72eb9e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.436863] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095357, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.616050] env[69927]: DEBUG nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.903116] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095355, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626514} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.903449] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 820c50b9-3c18-41bc-a000-22425b1dbb27/820c50b9-3c18-41bc-a000-22425b1dbb27.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 668.905431] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.905431] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ff4543d-b7bd-4b96-9127-472db1b11e1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.915068] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 668.915068] env[69927]: value = "task-4095358" [ 668.915068] env[69927]: _type = "Task" [ 668.915068] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.923495] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e339c023-6a41-4c3e-b21b-12461e9c99a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.927346] env[69927]: DEBUG oslo_concurrency.lockutils [req-531017b1-001f-4a97-ab9f-a2877da8e0eb req-6f2e8a0c-b791-46f1-8686-b7043e757883 service nova] Releasing lock "refresh_cache-820c50b9-3c18-41bc-a000-22425b1dbb27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.940664] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095358, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.945542] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80efc9e4-d5b9-4445-b0a5-338a33c26c6b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.963944] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095357, 'name': CreateVM_Task, 'duration_secs': 0.538572} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.992671] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.995279] env[69927]: DEBUG oslo_vmware.service [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe59566-9dc3-4bc3-a996-5a1537a84950 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.999087] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0da579-101a-4c95-b367-ad6fba4de797 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.010681] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.010869] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.011371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 669.011706] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e6f2e72-5eef-4c6a-8282-1cd67d69c70f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.015411] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099d1492-1fef-4349-bae2-32665f03869b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.023428] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the 
task: (returnval){ [ 669.023428] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5282412b-e4dd-1ee8-0285-0d439f919779" [ 669.023428] env[69927]: _type = "Task" [ 669.023428] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.034073] env[69927]: DEBUG nova.compute.provider_tree [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.043238] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.043513] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.043749] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.043887] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.044093] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.045052] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72b348d6-15a5-4079-a93a-57d2db371a3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.051352] env[69927]: DEBUG nova.network.neutron [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Successfully updated port: 4df42787-eead-48f3-a537-5f3f2a36a836 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.056776] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d 
tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.056776] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 669.057692] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f68f7f1-f4bb-4803-8f73-381bfc1dfeab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.065433] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0effe8e-b6d0-4865-9a27-fbef94a9ee85 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.072409] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 669.072409] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520a89b6-0d16-cc03-2780-17cb6bc413c3" [ 669.072409] env[69927]: _type = "Task" [ 669.072409] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.086505] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520a89b6-0d16-cc03-2780-17cb6bc413c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.424243] env[69927]: DEBUG nova.network.neutron [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Successfully created port: 67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.432978] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098147} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.435515] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.435515] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d8f46f-54d4-444d-95f2-1cc5d9f9d6c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.460864] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 820c50b9-3c18-41bc-a000-22425b1dbb27/820c50b9-3c18-41bc-a000-22425b1dbb27.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.461210] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-491c5124-7f77-4197-add2-1e81e94931cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.484180] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 669.484180] env[69927]: value = "task-4095359" [ 669.484180] env[69927]: _type = "Task" [ 669.484180] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.496684] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095359, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.538767] env[69927]: DEBUG nova.scheduler.client.report [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.554660] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "refresh_cache-c3c36508-96e1-4e75-931b-c7f0740b74e1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.554784] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquired lock "refresh_cache-c3c36508-96e1-4e75-931b-c7f0740b74e1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.554858] env[69927]: DEBUG nova.network.neutron [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.584160] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Preparing fetch location {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 669.584160] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Creating directory with path [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.584288] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-155a8d2c-f7ce-4cbf-88e7-bf1e687eeedf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.623852] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Created directory with path [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.624321] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d 
tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Fetch image to [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 669.624621] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Downloading image file data f524494e-9179-4b3e-a3e2-782f019def24 to [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk on the data store datastore2 {{(pid=69927) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 669.625627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20808d35-9205-442a-88c4-8af3a68e7114 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.630401] env[69927]: DEBUG nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 669.636690] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76670e58-1495-4b29-a137-ff2284e25434 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.648161] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73791e43-a9a7-42c7-9ef7-990b01731bf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.687896] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 669.688164] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.688318] env[69927]: DEBUG nova.virt.hardware 
[None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.688519] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.688983] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.688983] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 669.689113] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 669.689181] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 669.689352] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 669.689509] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 669.689671] env[69927]: DEBUG nova.virt.hardware [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.690527] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354a9b6c-7c4d-47c6-ac34-eb407194a3b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.693770] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53e0f98-b715-4ccd-af03-c49e77e9d8f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.704089] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-080fc9a4-3266-4a90-81f4-ddcc6f95c2f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.707203] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd618c6-0ab2-4228-89c2-d0d224a46bf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.735332] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Downloading image file data f524494e-9179-4b3e-a3e2-782f019def24 to the data store datastore2 {{(pid=69927) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 669.815498] env[69927]: DEBUG oslo_vmware.rw_handles [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69927) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 669.884106] env[69927]: DEBUG nova.compute.manager [None req-94ec68c9-b645-49b5-9f60-fd67f25b3beb tempest-ServerDiagnosticsV248Test-931281013 tempest-ServerDiagnosticsV248Test-931281013-project-admin] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 669.884106] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03fd48c-4eac-4c44-b9e8-b32ca03cd978 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.894701] env[69927]: INFO nova.compute.manager [None req-94ec68c9-b645-49b5-9f60-fd67f25b3beb tempest-ServerDiagnosticsV248Test-931281013 tempest-ServerDiagnosticsV248Test-931281013-project-admin] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Retrieving diagnostics [ 669.895973] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3513501a-a1c8-474b-b86d-110878b1c821 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.006610] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095359, 'name': ReconfigVM_Task, 'duration_secs': 0.32939} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.006610] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 820c50b9-3c18-41bc-a000-22425b1dbb27/820c50b9-3c18-41bc-a000-22425b1dbb27.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 670.006610] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c29aff8-34e3-4527-80f4-e826f4fbd6dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.019043] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 670.019043] env[69927]: value = "task-4095361" [ 670.019043] env[69927]: _type = "Task" [ 670.019043] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.032214] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095361, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.044315] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.045109] env[69927]: DEBUG nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.055957] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.027s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.056222] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.056652] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 670.056652] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.889s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.058356] env[69927]: INFO nova.compute.claims [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.067910] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c066a83a-a733-4aaa-b5a7-c8ddc54b1ccd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.083789] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f68553-2c06-4aa6-b692-25d0c423183d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.105925] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbc3bb8-30d7-4c33-8e2a-2aa52398ec0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.116440] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4792ba6-07be-4aa3-a9b7-adcd479ffb20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.154505] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180855MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 670.154505] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.176960] env[69927]: DEBUG nova.network.neutron [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.238440] env[69927]: DEBUG nova.network.neutron [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Successfully updated port: ee4c8bc2-33ea-425e-adbc-5a75ab428943 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 670.506742] env[69927]: DEBUG oslo_vmware.rw_handles [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Completed reading data from the image iterator. {{(pid=69927) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 670.506742] env[69927]: DEBUG oslo_vmware.rw_handles [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 670.536785] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095361, 'name': Rename_Task, 'duration_secs': 0.171581} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.539395] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 670.539829] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b40633e-f0d4-45a0-bba8-713ab284a444 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.550281] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 670.550281] env[69927]: value = "task-4095362" [ 670.550281] env[69927]: _type = "Task" [ 670.550281] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.564265] env[69927]: DEBUG nova.compute.utils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 670.566666] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095362, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.568207] env[69927]: DEBUG nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 670.666846] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Downloaded image file data f524494e-9179-4b3e-a3e2-782f019def24 to vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk on the data store datastore2 {{(pid=69927) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 670.668758] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Caching image {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 670.671751] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Copying Virtual Disk [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk to [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 670.672602] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad9a90bb-2ffc-4fbe-a27a-cc6db101373e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.683665] env[69927]: DEBUG nova.compute.manager [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Received event network-changed-8ed32df7-4147-4bf4-bedd-196b87bbea4d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 670.683927] env[69927]: DEBUG nova.compute.manager [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Refreshing instance network info cache due to event 
network-changed-8ed32df7-4147-4bf4-bedd-196b87bbea4d. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 670.684145] env[69927]: DEBUG oslo_concurrency.lockutils [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] Acquiring lock "refresh_cache-053f6f00-a818-473b-a887-4ec45174c1d5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.684305] env[69927]: DEBUG oslo_concurrency.lockutils [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] Acquired lock "refresh_cache-053f6f00-a818-473b-a887-4ec45174c1d5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.684466] env[69927]: DEBUG nova.network.neutron [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Refreshing network info cache for port 8ed32df7-4147-4bf4-bedd-196b87bbea4d {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.691050] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 670.691050] env[69927]: value = "task-4095363" [ 670.691050] env[69927]: _type = "Task" [ 670.691050] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.706878] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095363, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.726021] env[69927]: DEBUG nova.network.neutron [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Updating instance_info_cache with network_info: [{"id": "4df42787-eead-48f3-a537-5f3f2a36a836", "address": "fa:16:3e:d5:78:c5", "network": {"id": "383f690a-a926-4188-b7ad-293c8591e1fa", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1555439978-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf8f9d710b8d4390b0c0bc0c25c1aac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4df42787-ee", "ovs_interfaceid": "4df42787-eead-48f3-a537-5f3f2a36a836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.741472] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "refresh_cache-2cdfda66-1d93-4960-a129-2788f10fa593" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.742492] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquired lock "refresh_cache-2cdfda66-1d93-4960-a129-2788f10fa593" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.742492] env[69927]: DEBUG nova.network.neutron [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.066779] env[69927]: DEBUG nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 671.070135] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095362, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.211223] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095363, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.229554] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Releasing lock "refresh_cache-c3c36508-96e1-4e75-931b-c7f0740b74e1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.230054] env[69927]: DEBUG nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Instance network_info: |[{"id": "4df42787-eead-48f3-a537-5f3f2a36a836", "address": "fa:16:3e:d5:78:c5", "network": {"id": "383f690a-a926-4188-b7ad-293c8591e1fa", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1555439978-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf8f9d710b8d4390b0c0bc0c25c1aac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4df42787-ee", "ovs_interfaceid": "4df42787-eead-48f3-a537-5f3f2a36a836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 671.230992] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:78:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '136c3499-9ca0-4f85-903d-1f194aa66ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4df42787-eead-48f3-a537-5f3f2a36a836', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 671.242521] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Creating folder: Project (cf8f9d710b8d4390b0c0bc0c25c1aac6). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.246203] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d154f885-f338-435e-9a1a-82ca17f8ea2a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.264678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "4bf59fae-8029-421b-95fd-a0d008891ce7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.264947] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.276162] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Created folder: Project (cf8f9d710b8d4390b0c0bc0c25c1aac6) in parent group-v811283. [ 671.276465] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Creating folder: Instances. Parent ref: group-v811297. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.279562] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32b204e2-3283-4b2a-bd86-c62b32758992 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.292519] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Created folder: Instances in parent group-v811297. [ 671.292688] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 671.292860] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 671.293381] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c78d4989-9091-4d10-8a6d-eaa17a0e32a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.323079] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.323987] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.327309] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d2845b-48b1-4f89-bf1f-970465c11822 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.328522] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 671.328522] env[69927]: value = "task-4095366" [ 671.328522] env[69927]: _type = "Task" [ 671.328522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.336598] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a966cb7-c463-430a-bc31-ccb6d1a6fbfa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.348059] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095366, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.385530] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8cfa10-b209-4c5a-ac9a-89a0a68597e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.388620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "b1bcbcfb-2320-434c-901f-0f6a476a3069" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.388929] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.399333] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfa71ad-2f32-4380-9420-38d5a4932656 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.416327] env[69927]: DEBUG nova.compute.provider_tree [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.562712] env[69927]: DEBUG oslo_vmware.api [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095362, 'name': PowerOnVM_Task, 'duration_secs': 0.539737} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.564314] env[69927]: DEBUG nova.network.neutron [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.566237] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.566444] env[69927]: INFO nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Took 12.35 seconds to spawn the instance on the hypervisor. 
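The repeating "Invoking <Something>_Task ... Waiting for the task ... progress is N% ... completed successfully" entries above are oslo.vmware's task-polling loop. A minimal sketch of that pattern follows; the endpoint, credentials, and VM lookup are placeholders for illustration and do not come from this deployment (nova reads the real values from the [vmware] host_ip/host_username/host_password options).

from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials, not values from this environment.
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Look up some VM managed-object reference (illustrative; assumes at least
# one VM exists and ignores paging of the RetrievePropertiesEx result).
result = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'VirtualMachine', 1)
vm_ref = result.objects[0].obj

# vSphere methods ending in _Task (ReconfigVM_Task, Rename_Task,
# PowerOnVM_Task, ...) return a Task object immediately; wait_for_task()
# polls it -- the "progress is N%" lines in the log -- and raises if the
# task finishes in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)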
[ 671.566623] env[69927]: DEBUG nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 671.567725] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96a8282-4ca0-4a45-a637-971e1c514ec2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.702371] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095363, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.894135} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.702632] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Copied Virtual Disk [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk to [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.702812] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Deleting the datastore file [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24/tmp-sparse.vmdk {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 671.703064] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41a401ae-a0b3-45d9-92a2-fa11cefe4aa6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.710224] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 671.710224] env[69927]: value = "task-4095367" [ 671.710224] env[69927]: _type = "Task" [ 671.710224] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.719996] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.756204] env[69927]: DEBUG nova.network.neutron [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Updated VIF entry in instance network info cache for port 8ed32df7-4147-4bf4-bedd-196b87bbea4d. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.756204] env[69927]: DEBUG nova.network.neutron [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Updating instance_info_cache with network_info: [{"id": "8ed32df7-4147-4bf4-bedd-196b87bbea4d", "address": "fa:16:3e:2a:14:ed", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed32df7-41", "ovs_interfaceid": "8ed32df7-4147-4bf4-bedd-196b87bbea4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.771047] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 671.829589] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 671.849298] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095366, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.895151] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 671.926071] env[69927]: DEBUG nova.scheduler.client.report [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.021973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "8edafb98-331a-45b8-8de8-4ba04b035ffd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.021973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.080043] env[69927]: DEBUG nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 672.095230] env[69927]: INFO nova.compute.manager [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Took 19.31 seconds to build instance. 
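The inventory dictionary the report client logs above maps directly onto what placement lets the scheduler consume: usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick illustration using the exact numbers from the log line; the arithmetic is written out here only for clarity and is not nova or placement code.

# Inventory copied verbatim from the report above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Placement's capacity check: (total - reserved) * allocation_ratio.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, per-instance max={inv['max_unit']}")

# VCPU: schedulable=192, per-instance max=16
# MEMORY_MB: schedulable=196078, per-instance max=65530
# DISK_GB: schedulable=400, per-instance max=17

For the m1.nano flavor used in these tests (1 vCPU, 192 MB RAM, 1 GB root disk) this host has ample headroom; the DISK_GB max_unit of 17 is what bounds any single instance's root disk request.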
[ 672.125175] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 672.125439] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.125960] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 672.125960] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.125960] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.126157] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 672.126404] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 672.126558] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 672.126907] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Got 1 possible topologies 
{{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 672.126907] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 672.127057] env[69927]: DEBUG nova.virt.hardware [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 672.128992] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed28b3f3-00e7-4894-9d69-ad388bc1654d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.138835] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a937e35-321e-4ea0-b091-b2a0fbc773b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.157718] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.163858] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Creating folder: Project (eab3ca5fec2d4b3da520b24ac68bab78). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.165389] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63c9695c-1bd5-4540-afad-56c2fb51ed57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.182063] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Created folder: Project (eab3ca5fec2d4b3da520b24ac68bab78) in parent group-v811283. [ 672.182063] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Creating folder: Instances. Parent ref: group-v811300. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.182063] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b47665c-0585-4354-adbc-40cbf80e827f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.198817] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Created folder: Instances in parent group-v811300. 
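The nova.virt.hardware entries above walk from the flavor/image CPU-topology preferences (all 0:0:0 here) and the default limits of 65536 down to the single viable topology for one vCPU. The sketch below reproduces that enumeration and the logged result; it is a simplified illustration, not nova's actual implementation.

import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Enumerate every sockets*cores*threads split of the vCPU count that
    # stays within the limits; nova then sorts the candidates by preference.
    for s, c, t in itertools.product(range(1, min(max_sockets, vcpus) + 1),
                                     range(1, min(max_cores, vcpus) + 1),
                                     range(1, min(max_threads, vcpus) + 1)):
        if s * c * t == vcpus:
            yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the one topology the
# log reports as both "possible" and "sorted desired".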
[ 672.199146] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.199416] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 672.199646] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6f7fb41-f49c-4243-895d-98509979a66e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.217880] env[69927]: DEBUG nova.network.neutron [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Updating instance_info_cache with network_info: [{"id": "ee4c8bc2-33ea-425e-adbc-5a75ab428943", "address": "fa:16:3e:3a:01:21", "network": {"id": "8cfd4578-2ab0-4814-804e-a670f4f295dc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-814812168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731443ca96b749c1861bf82f6f36c238", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee4c8bc2-33", "ovs_interfaceid": "ee4c8bc2-33ea-425e-adbc-5a75ab428943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.222696] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.222696] env[69927]: value = "task-4095371" [ 672.222696] env[69927]: _type = "Task" [ 672.222696] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.225760] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.058464} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.228927] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 672.229241] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Moving file from [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d/f524494e-9179-4b3e-a3e2-782f019def24 to [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24. {{(pid=69927) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 672.229723] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-1df7d2c5-85e7-4724-b2cf-da8033053f9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.240088] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095371, 'name': CreateVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.242660] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 672.242660] env[69927]: value = "task-4095372" [ 672.242660] env[69927]: _type = "Task" [ 672.242660] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.255078] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095372, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.259030] env[69927]: DEBUG oslo_concurrency.lockutils [req-f399a24a-6181-4a13-851c-48b27df1b9d4 req-eecbba30-bdc4-4632-a2bd-02cc23acd553 service nova] Releasing lock "refresh_cache-053f6f00-a818-473b-a887-4ec45174c1d5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.293662] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.356559] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095366, 'name': CreateVM_Task, 'duration_secs': 0.61912} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.356559] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.356559] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.357147] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.359466] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 672.361881] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b21441b9-8957-4203-9980-32bdedd42ddc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.365878] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 672.365878] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52af177d-579b-ead4-00ee-ec2d3c04d058" [ 672.365878] env[69927]: _type = "Task" [ 672.365878] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.373580] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.378411] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52af177d-579b-ead4-00ee-ec2d3c04d058, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.425843] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.433787] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.434914] env[69927]: DEBUG nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 672.437512] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.284s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.523416] env[69927]: DEBUG nova.compute.manager [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Received event network-vif-plugged-4df42787-eead-48f3-a537-5f3f2a36a836 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 672.523416] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] Acquiring lock "c3c36508-96e1-4e75-931b-c7f0740b74e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.523416] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.523683] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.523718] env[69927]: DEBUG nova.compute.manager [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] No waiting events found dispatching network-vif-plugged-4df42787-eead-48f3-a537-5f3f2a36a836 {{(pid=69927) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:321}} [ 672.523866] env[69927]: WARNING nova.compute.manager [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Received unexpected event network-vif-plugged-4df42787-eead-48f3-a537-5f3f2a36a836 for instance with vm_state building and task_state spawning. [ 672.525527] env[69927]: DEBUG nova.compute.manager [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Received event network-changed-4df42787-eead-48f3-a537-5f3f2a36a836 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 672.525759] env[69927]: DEBUG nova.compute.manager [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Refreshing instance network info cache due to event network-changed-4df42787-eead-48f3-a537-5f3f2a36a836. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 672.525869] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] Acquiring lock "refresh_cache-c3c36508-96e1-4e75-931b-c7f0740b74e1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.526013] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] Acquired lock "refresh_cache-c3c36508-96e1-4e75-931b-c7f0740b74e1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.527561] env[69927]: DEBUG nova.network.neutron [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Refreshing network info cache for port 4df42787-eead-48f3-a537-5f3f2a36a836 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 672.600296] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ebe4c8c3-6baf-436c-a0c8-589f63e523ee tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.825s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.639029] env[69927]: DEBUG nova.network.neutron [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Successfully updated port: 67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 672.720762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Releasing lock "refresh_cache-2cdfda66-1d93-4960-a129-2788f10fa593" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.721132] env[69927]: DEBUG nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 
tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Instance network_info: |[{"id": "ee4c8bc2-33ea-425e-adbc-5a75ab428943", "address": "fa:16:3e:3a:01:21", "network": {"id": "8cfd4578-2ab0-4814-804e-a670f4f295dc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-814812168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731443ca96b749c1861bf82f6f36c238", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee4c8bc2-33", "ovs_interfaceid": "ee4c8bc2-33ea-425e-adbc-5a75ab428943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 672.721549] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:01:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee4c8bc2-33ea-425e-adbc-5a75ab428943', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.733231] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Creating folder: Project (731443ca96b749c1861bf82f6f36c238). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.733947] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d85007d7-d596-4e84-88ae-297262372ba1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.746050] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095371, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.754190] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Created folder: Project (731443ca96b749c1861bf82f6f36c238) in parent group-v811283. [ 672.754190] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Creating folder: Instances. Parent ref: group-v811303. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.754190] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b5eb515-440d-42dc-8338-67d84f8e7e52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.759251] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095372, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.767088] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Created folder: Instances in parent group-v811303. [ 672.767487] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.767698] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 672.767943] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b873b93d-552c-427c-9078-e405eb64f52d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.794777] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.794777] env[69927]: value = "task-4095375" [ 672.794777] env[69927]: _type = "Task" [ 672.794777] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.804538] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095375, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.885988] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.886309] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 672.886550] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.942234] env[69927]: DEBUG nova.compute.utils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 672.947047] env[69927]: DEBUG nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 672.947310] env[69927]: DEBUG nova.network.neutron [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.022097] env[69927]: DEBUG nova.policy [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5978ed37f4ef4cc59e5ca99b1c3bc259', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc73b36a965a4503b546cc90a2950441', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 673.110021] env[69927]: DEBUG nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 673.143370] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.143963] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.144349] env[69927]: DEBUG nova.network.neutron [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.205615] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.209486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.246862] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095371, 'name': CreateVM_Task, 'duration_secs': 0.950843} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.250235] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.250731] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.250884] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.251245] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.251932] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6caf7b41-abcb-4c5e-9018-5217b7a71c57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.257545] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095372, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.656375} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.258952] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] File moved {{(pid=69927) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 673.259160] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Cleaning up location [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 673.259438] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Deleting the datastore file [datastore2] vmware_temp/1bde0617-9d2b-460d-b1a7-c7a05981fb2d {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.259755] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 673.259755] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b24e1-9dad-96cf-c666-cf18a3e4d13e" [ 673.259755] env[69927]: _type = "Task" [ 673.259755] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.259977] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74a8cdc6-076c-4934-a691-a82e12b26a68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.275725] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b24e1-9dad-96cf-c666-cf18a3e4d13e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.276045] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 673.276045] env[69927]: value = "task-4095376" [ 673.276045] env[69927]: _type = "Task" [ 673.276045] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.285454] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.309353] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095375, 'name': CreateVM_Task, 'duration_secs': 0.413004} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.309535] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.310235] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.455472] env[69927]: DEBUG nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 673.641187] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.733191] env[69927]: DEBUG nova.network.neutron [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.774246] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b24e1-9dad-96cf-c666-cf18a3e4d13e, 'name': SearchDatastore_Task, 'duration_secs': 0.024981} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.778384] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.782890] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.782890] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.782890] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.782890] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.787028] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-900f2cba-00df-4fb2-bf70-3b41be85c240 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.794704] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092956} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.796088] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.796978] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 673.796978] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52868dbf-182a-4756-1454-2dd9384c1294" [ 673.796978] env[69927]: _type = "Task" [ 673.796978] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.797263] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc7228f9-a7c3-4b75-9d73-3b614f8a791d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.816461] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52868dbf-182a-4756-1454-2dd9384c1294, 'name': SearchDatastore_Task, 'duration_secs': 0.012369} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.817170] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 673.817170] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52caec26-37d6-cf21-7661-2f973fe4e5da" [ 673.817170] env[69927]: _type = "Task" [ 673.817170] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.817170] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.821608] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.822024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.836645] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52caec26-37d6-cf21-7661-2f973fe4e5da, 'name': SearchDatastore_Task, 'duration_secs': 0.013575} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.836645] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.836828] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 053f6f00-a818-473b-a887-4ec45174c1d5/053f6f00-a818-473b-a887-4ec45174c1d5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 673.837309] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.837309] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.837734] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-464b84c9-3376-4dc3-b7b0-dd74315707ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.841231] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a982157a-aa8c-43ea-86ff-6b4e8f4b689c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.849377] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 673.849377] env[69927]: value = "task-4095377" [ 673.849377] env[69927]: _type = "Task" [ 673.849377] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.851067] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.851336] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.856423] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-756e1355-362e-4138-8562-b615031944f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.863875] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 673.863875] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5284818d-fd38-0a81-bd5f-b3d0b64f9d3b" [ 673.863875] env[69927]: _type = "Task" [ 673.863875] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.867482] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.884401] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5284818d-fd38-0a81-bd5f-b3d0b64f9d3b, 'name': SearchDatastore_Task, 'duration_secs': 0.011567} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.888341] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-025a4d7c-25d3-4340-893c-5bfd67e4f66d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.895198] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 673.895198] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5276a36d-8a04-7d36-a008-5ae7979b9ce9" [ 673.895198] env[69927]: _type = "Task" [ 673.895198] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.906400] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5276a36d-8a04-7d36-a008-5ae7979b9ce9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.006318] env[69927]: DEBUG nova.network.neutron [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Updated VIF entry in instance network info cache for port 4df42787-eead-48f3-a537-5f3f2a36a836. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 674.010024] env[69927]: DEBUG nova.network.neutron [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Updating instance_info_cache with network_info: [{"id": "4df42787-eead-48f3-a537-5f3f2a36a836", "address": "fa:16:3e:d5:78:c5", "network": {"id": "383f690a-a926-4188-b7ad-293c8591e1fa", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1555439978-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf8f9d710b8d4390b0c0bc0c25c1aac6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4df42787-ee", "ovs_interfaceid": "4df42787-eead-48f3-a537-5f3f2a36a836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.013012] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ee422a46-c6e4-4098-8f74-b9f0779d0fba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to 
start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 674.121381] env[69927]: DEBUG nova.network.neutron [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance_info_cache with network_info: [{"id": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "address": "fa:16:3e:89:98:e2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c7df0a-bb", "ovs_interfaceid": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.196180] env[69927]: DEBUG nova.network.neutron [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Successfully created port: b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.364999] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095377, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.410644] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5276a36d-8a04-7d36-a008-5ae7979b9ce9, 'name': SearchDatastore_Task, 'duration_secs': 0.011214} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.411069] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.411438] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c3c36508-96e1-4e75-931b-c7f0740b74e1/c3c36508-96e1-4e75-931b-c7f0740b74e1.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 674.411812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.412044] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 674.412341] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-307dd2f9-4f0b-4771-bc1b-9fdf242d375e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.414870] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03966d36-9cb2-4b4b-b25e-2aa864f1ffe0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.427339] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 674.427339] env[69927]: value = "task-4095379" [ 674.427339] env[69927]: _type = "Task" [ 674.427339] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.436119] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 674.436119] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 674.436119] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d18cbc0f-08c0-450b-9de7-ae16775b16ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.443114] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.447875] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 674.447875] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d52ac7-a974-fc79-b6ed-56d7dc15a2ed" [ 674.447875] env[69927]: _type = "Task" [ 674.447875] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.459877] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d52ac7-a974-fc79-b6ed-56d7dc15a2ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.479062] env[69927]: DEBUG nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 674.516587] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7b4e9c2-92c6-4a00-9aad-d06b3e06ef03 req-3b72036c-685d-4f50-9a62-809d0be0f061 service nova] Releasing lock "refresh_cache-c3c36508-96e1-4e75-931b-c7f0740b74e1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.517183] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 4bf59fae-8029-421b-95fd-a0d008891ce7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 674.524112] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 674.524112] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.524112] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 674.524395] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.524395] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 674.525319] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 674.525319] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 674.525319] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 674.525319] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 
tempest-ServersTestJSON-2147431032-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 674.528079] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 674.528079] env[69927]: DEBUG nova.virt.hardware [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 674.528079] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3888ea6b-da1d-4cd6-99e2-a80e1f018319 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.541250] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70119b9f-c5c6-45c1-a753-b0089db20b67 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.627411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.627411] env[69927]: DEBUG nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Instance network_info: |[{"id": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "address": "fa:16:3e:89:98:e2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c7df0a-bb", "ovs_interfaceid": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 674.629848] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:89:98:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67c7df0a-bb67-40ee-9a2d-11cea9dbacb7', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.639203] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Creating folder: Project (be278be46f3d48df818c834df17c663f). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.639555] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-092840a1-be47-4e7c-a6ea-7c20d20af733 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.651364] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Created folder: Project (be278be46f3d48df818c834df17c663f) in parent group-v811283. [ 674.652023] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Creating folder: Instances. Parent ref: group-v811306. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.652023] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-315580b8-6c84-41bd-abdc-bde3f95e172e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.662532] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Created folder: Instances in parent group-v811306. [ 674.662532] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 674.662804] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.663143] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e6a4889-5497-4a09-92ce-4a69d58fdbf0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.685393] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.685393] env[69927]: value = "task-4095382" [ 674.685393] env[69927]: _type = "Task" [ 674.685393] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.696211] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095382, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.715502] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.718896] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.864299] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.769779} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.864846] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 053f6f00-a818-473b-a887-4ec45174c1d5/053f6f00-a818-473b-a887-4ec45174c1d5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 674.865402] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 674.865885] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aca33803-8935-46ee-b19b-ce68f44c6084 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.877452] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 674.877452] env[69927]: value = "task-4095383" [ 674.877452] env[69927]: _type = "Task" [ 674.877452] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.892088] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "8442f144-2be4-4634-b151-62f049a975b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.892475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "8442f144-2be4-4634-b151-62f049a975b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.897118] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.941039] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095379, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.961997] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d52ac7-a974-fc79-b6ed-56d7dc15a2ed, 'name': SearchDatastore_Task, 'duration_secs': 0.060205} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.961997] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a94e0b-6d16-4735-99dd-23585f6831a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.968591] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 674.968591] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52124b11-6189-326f-572c-81ced6a989c2" [ 674.968591] env[69927]: _type = "Task" [ 674.968591] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.978151] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52124b11-6189-326f-572c-81ced6a989c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.032233] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ab8a8acc-cab7-4a82-bd90-b34147f17b0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 675.100321] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Received event network-vif-plugged-ee4c8bc2-33ea-425e-adbc-5a75ab428943 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 675.100379] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Acquiring lock "2cdfda66-1d93-4960-a129-2788f10fa593-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.100549] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Lock "2cdfda66-1d93-4960-a129-2788f10fa593-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.102227] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Lock "2cdfda66-1d93-4960-a129-2788f10fa593-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.102522] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] No waiting events found dispatching network-vif-plugged-ee4c8bc2-33ea-425e-adbc-5a75ab428943 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 675.102681] env[69927]: WARNING nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Received unexpected event network-vif-plugged-ee4c8bc2-33ea-425e-adbc-5a75ab428943 for instance with vm_state building and task_state spawning. [ 675.103646] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Received event network-changed-ee4c8bc2-33ea-425e-adbc-5a75ab428943 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 675.103646] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Refreshing instance network info cache due to event network-changed-ee4c8bc2-33ea-425e-adbc-5a75ab428943. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 675.103646] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Acquiring lock "refresh_cache-2cdfda66-1d93-4960-a129-2788f10fa593" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.103646] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Acquired lock "refresh_cache-2cdfda66-1d93-4960-a129-2788f10fa593" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.103646] env[69927]: DEBUG nova.network.neutron [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Refreshing network info cache for port ee4c8bc2-33ea-425e-adbc-5a75ab428943 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.204618] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095382, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.390933] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174962} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.391314] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.392268] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e028682-ca93-4c8c-93f1-0990159b815d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.417373] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 053f6f00-a818-473b-a887-4ec45174c1d5/053f6f00-a818-473b-a887-4ec45174c1d5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.417817] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6e3a15c-5579-4e48-b0d6-9fad1c836ef1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.449287] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095379, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670347} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.450654] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c3c36508-96e1-4e75-931b-c7f0740b74e1/c3c36508-96e1-4e75-931b-c7f0740b74e1.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 675.450924] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.451332] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 675.451332] env[69927]: value = "task-4095384" [ 675.451332] env[69927]: _type = "Task" [ 675.451332] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.451552] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b8d2557-7ffe-427b-8aa5-594c5bf4e153 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.466477] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.471048] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 675.471048] env[69927]: value = "task-4095385" [ 675.471048] env[69927]: _type = "Task" [ 675.471048] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.480413] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095385, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.484171] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52124b11-6189-326f-572c-81ced6a989c2, 'name': SearchDatastore_Task, 'duration_secs': 0.059323} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.484507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.484802] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 675.485139] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.485354] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 675.485625] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49d35531-b898-463d-b04f-80b33a65a384 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.487793] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d32c5a70-64e9-4a44-a94f-6c762812d1d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.497128] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 675.497128] env[69927]: value = "task-4095386" [ 675.497128] env[69927]: _type = "Task" [ 675.497128] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.498410] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 675.498623] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 675.502556] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a14bc529-1866-4dfe-b360-d5fcc17a789e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.512044] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 675.512044] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f08ce6-7bff-561b-32c8-c4bcaad8bbb6" [ 675.512044] env[69927]: _type = "Task" [ 675.512044] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.512647] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095386, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.522205] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f08ce6-7bff-561b-32c8-c4bcaad8bbb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.535429] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b1bcbcfb-2320-434c-901f-0f6a476a3069 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 675.535429] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 2cdfda66-1d93-4960-a129-2788f10fa593 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 675.535615] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 4ad26720-ed24-4963-9519-3345dbfeb9a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 675.535615] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 053f6f00-a818-473b-a887-4ec45174c1d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 675.535728] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 820c50b9-3c18-41bc-a000-22425b1dbb27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 675.535820] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9348e368-cc3c-4bde-91ae-26fd03ad536a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 675.536073] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c3c36508-96e1-4e75-931b-c7f0740b74e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 675.701874] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095382, 'name': CreateVM_Task, 'duration_secs': 0.653833} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.702503] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.703864] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.704209] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.704605] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 675.705260] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79b82736-89f7-456d-85b8-2599f1f470bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.712256] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 675.712256] env[69927]: value = 
"session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b006f-ed22-22aa-3080-c6dd06b2c89c" [ 675.712256] env[69927]: _type = "Task" [ 675.712256] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.732421] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b006f-ed22-22aa-3080-c6dd06b2c89c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.735850] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "128d0705-21a0-4103-ae84-85bbac7e718b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.736112] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.943442] env[69927]: DEBUG nova.network.neutron [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Updated VIF entry in instance network info cache for port ee4c8bc2-33ea-425e-adbc-5a75ab428943. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.943442] env[69927]: DEBUG nova.network.neutron [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Updating instance_info_cache with network_info: [{"id": "ee4c8bc2-33ea-425e-adbc-5a75ab428943", "address": "fa:16:3e:3a:01:21", "network": {"id": "8cfd4578-2ab0-4814-804e-a670f4f295dc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-814812168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "731443ca96b749c1861bf82f6f36c238", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee4c8bc2-33", "ovs_interfaceid": "ee4c8bc2-33ea-425e-adbc-5a75ab428943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.971145] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095384, 'name': ReconfigVM_Task, 'duration_secs': 0.491457} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.976688] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 053f6f00-a818-473b-a887-4ec45174c1d5/053f6f00-a818-473b-a887-4ec45174c1d5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 675.977690] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac80ea6b-6658-4664-b00b-d71e2116f579 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.987148] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095385, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079488} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.988761] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.989131] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 675.989131] env[69927]: value = "task-4095387" [ 675.989131] env[69927]: _type = "Task" [ 675.989131] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.989849] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af76674-6d32-4848-b250-7f7bfcfb428e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.024295] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] c3c36508-96e1-4e75-931b-c7f0740b74e1/c3c36508-96e1-4e75-931b-c7f0740b74e1.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.025313] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095387, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.029744] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b55f39f4-4fd2-46cb-9ce7-a795bfd1ba6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.050931] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8edafb98-331a-45b8-8de8-4ba04b035ffd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 676.051346] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c45d2259-2a05-49d5-81eb-4c79ced83121 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 676.051346] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7ce79e41-333a-4ef3-ba68-f74067d4ac5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 676.051606] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 676.052963] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 676.054176] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095386, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50193} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.055277] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 676.055529] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.056791] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71a954c7-25b1-43a0-a8a7-eca57ebb87b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.063398] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f08ce6-7bff-561b-32c8-c4bcaad8bbb6, 'name': SearchDatastore_Task, 'duration_secs': 0.020345} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.067799] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 676.067799] env[69927]: value = "task-4095388" [ 676.067799] env[69927]: _type = "Task" [ 676.067799] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.068879] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e908da36-eeb2-475d-a1c9-f1d5425d3d62 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.076510] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 676.076510] env[69927]: value = "task-4095389" [ 676.076510] env[69927]: _type = "Task" [ 676.076510] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.088628] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 676.088628] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52084f30-871c-8646-c798-81fcf4be69fc" [ 676.088628] env[69927]: _type = "Task" [ 676.088628] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.089057] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095388, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.101951] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095389, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.109329] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52084f30-871c-8646-c798-81fcf4be69fc, 'name': SearchDatastore_Task, 'duration_secs': 0.012398} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.113313] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.113719] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 2cdfda66-1d93-4960-a129-2788f10fa593/2cdfda66-1d93-4960-a129-2788f10fa593.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 676.114481] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-468f1961-32a9-4c25-b3e7-9bb9d26294e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.124435] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 676.124435] env[69927]: value = "task-4095390" [ 676.124435] env[69927]: _type = "Task" [ 676.124435] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.137274] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095390, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.230237] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b006f-ed22-22aa-3080-c6dd06b2c89c, 'name': SearchDatastore_Task, 'duration_secs': 0.052985} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.230237] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.230237] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.230721] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.230721] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.230846] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.231438] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21e4a04a-cf4f-494f-8b1e-57d5102bdfac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.242103] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.242103] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.245622] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe002d78-d2f8-42e0-8a09-1f3353d7685d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.256946] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 676.256946] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528030e8-7d27-b1a4-23a5-835738d9136d" [ 676.256946] env[69927]: _type = "Task" [ 676.256946] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.279803] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528030e8-7d27-b1a4-23a5-835738d9136d, 'name': SearchDatastore_Task, 'duration_secs': 0.011906} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.282815] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0fd3a5c-d2e6-4aef-ac81-0fdf53b18284 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.291814] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 676.291814] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5299da3c-118f-3257-cc9b-77b2bd838dfe" [ 676.291814] env[69927]: _type = "Task" [ 676.291814] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.307764] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5299da3c-118f-3257-cc9b-77b2bd838dfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.363009] env[69927]: DEBUG nova.network.neutron [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Successfully updated port: b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 676.427903] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8a886d-e994-4624-bbf8-9e4dde73ad88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.442817] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953ff64a-b0e7-459d-907f-bf6f674ed589 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.447836] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Releasing lock "refresh_cache-2cdfda66-1d93-4960-a129-2788f10fa593" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.448233] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Received event network-vif-plugged-67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 676.448452] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Acquiring lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.448657] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.449104] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.449104] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] No waiting events found dispatching network-vif-plugged-67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 676.449443] env[69927]: WARNING nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Received unexpected event 
network-vif-plugged-67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 for instance with vm_state building and task_state spawning. [ 676.449750] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Received event network-changed-67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 676.449936] env[69927]: DEBUG nova.compute.manager [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Refreshing instance network info cache due to event network-changed-67c7df0a-bb67-40ee-9a2d-11cea9dbacb7. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 676.450215] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Acquiring lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.450368] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Acquired lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.450592] env[69927]: DEBUG nova.network.neutron [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Refreshing network info cache for port 67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 676.491030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b599c6-8d7a-4552-abcb-c03f33763d4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.501593] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98b42dd-73ea-455b-bc83-d74c82719bcd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.510961] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095387, 'name': Rename_Task, 'duration_secs': 0.172092} completed successfully. 
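
The event entries above show the per-instance event bookkeeping: Neutron reports network-vif-plugged, the compute manager takes the "<uuid>-events" lock, looks for a waiter registered for that event, and logs the "Received unexpected event" warning when nothing is waiting yet (here the instance is still building, so no waiter has been registered). A small sketch of that pop-or-warn logic, with a plain dict standing in for Nova's event registry (hypothetical structure, not the real InstanceEvents class):

    import threading

    # instance_uuid -> {event_name: threading.Event()} for callers that
    # registered interest before the notification arrived (hypothetical registry).
    _waiters: dict[str, dict[str, threading.Event]] = {}
    _lock = threading.Lock()


    def pop_instance_event(instance_uuid: str, event_name: str) -> bool:
        """Wake a registered waiter for this event, or report it as unexpected."""
        with _lock:  # plays the role of the "<uuid>-events" lock in the trace
            waiter = _waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
            return False
        waiter.set()
        return True
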
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.512205] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.512205] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fa4633e-574e-4abf-b45b-995b09fa5481 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.525238] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.534602] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 676.534602] env[69927]: value = "task-4095391" [ 676.534602] env[69927]: _type = "Task" [ 676.534602] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.552038] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.588008] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095388, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.595679] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0812} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.595679] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 676.596622] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1f6597-3d53-4c66-9e29-6110c7a29f33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.619919] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.621060] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40a1c319-2286-49fe-9adb-d58a45c162e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.647853] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51424} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.649046] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 2cdfda66-1d93-4960-a129-2788f10fa593/2cdfda66-1d93-4960-a129-2788f10fa593.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 676.649304] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.649617] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 676.649617] env[69927]: value = "task-4095392" [ 676.649617] env[69927]: _type = "Task" [ 676.649617] env[69927]: } to complete. 
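
Taken together, the per-instance task sequence in this trace is always the same: copy the cached image VMDK into the instance directory, extend the root disk to the flavor size, reconfigure the VM to attach that disk, rename the VM, and power it on, waiting for each vCenter task before starting the next. A compressed sketch of that ordering; every helper on `ops` is a hypothetical wrapper that invokes the named task and blocks until it completes:

    def spawn_from_cached_image(ops, instance_uuid, image_id, root_kb,
                                datastore="datastore2"):
        """Replay the per-instance task ordering visible in this trace."""
        cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        root_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

        ops.copy_virtual_disk(cache_vmdk, root_vmdk)      # CopyVirtualDisk_Task
        ops.extend_virtual_disk(root_vmdk, root_kb)       # ExtendVirtualDisk_Task
        ops.attach_disk_to_vm(instance_uuid, root_vmdk)   # ReconfigVM_Task
        ops.rename_vm(instance_uuid)                      # Rename_Task
        ops.power_on_vm(instance_uuid)                    # PowerOnVM_Task
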
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.649835] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f93bbd5e-754b-4e64-a2d5-4b04f5c9fdf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.660275] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095392, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.661809] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 676.661809] env[69927]: value = "task-4095393" [ 676.661809] env[69927]: _type = "Task" [ 676.661809] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.672265] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.801970] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5299da3c-118f-3257-cc9b-77b2bd838dfe, 'name': SearchDatastore_Task, 'duration_secs': 0.028659} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.804262] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.804262] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9348e368-cc3c-4bde-91ae-26fd03ad536a/9348e368-cc3c-4bde-91ae-26fd03ad536a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 676.804262] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7dc41bc-0ace-49bc-9850-32d5a1c492ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.811473] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 676.811473] env[69927]: value = "task-4095394" [ 676.811473] env[69927]: _type = "Task" [ 676.811473] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.823764] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095394, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.871250] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.871250] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquired lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.871250] env[69927]: DEBUG nova.network.neutron [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 677.030281] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.050317] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095391, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.089183] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095388, 'name': ReconfigVM_Task, 'duration_secs': 0.821694} completed successfully. 
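
The inventory dict reported for provider 2f529b36-df5f-4b37-8103-68f74f737726 is the standard Placement shape, and the schedulable capacity of each resource class is (total - reserved) * allocation_ratio: 192 VCPU, 196078 MB of RAM, and 400 GB of disk for the numbers above. A few lines reproducing that arithmetic from the values in the log:

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable capacity {capacity:g}")
    # VCPU: schedulable capacity 192
    # MEMORY_MB: schedulable capacity 196078
    # DISK_GB: schedulable capacity 400
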
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.091119] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Reconfigured VM instance instance-00000004 to attach disk [datastore2] c3c36508-96e1-4e75-931b-c7f0740b74e1/c3c36508-96e1-4e75-931b-c7f0740b74e1.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.092681] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd805e45-7e97-45f0-a394-8c497f5305f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.106458] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 677.106458] env[69927]: value = "task-4095395" [ 677.106458] env[69927]: _type = "Task" [ 677.106458] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.126451] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095395, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.170114] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095392, 'name': ReconfigVM_Task, 'duration_secs': 0.406445} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.170863] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Reconfigured VM instance instance-00000007 to attach disk [datastore2] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.171513] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe37c412-0482-4f6a-bc3b-f3cd96893e03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.177208] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085996} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.177339] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.182193] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9abdbe2-040d-4baf-8ace-2547e0174227 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.185264] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 677.185264] env[69927]: value = "task-4095396" [ 677.185264] env[69927]: _type = "Task" [ 677.185264] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.214706] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 2cdfda66-1d93-4960-a129-2788f10fa593/2cdfda66-1d93-4960-a129-2788f10fa593.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.215974] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-889f50ab-a4a4-45d9-8628-026831a919c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.242191] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095396, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.253415] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 677.253415] env[69927]: value = "task-4095397" [ 677.253415] env[69927]: _type = "Task" [ 677.253415] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.269032] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095397, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.321963] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095394, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.428533] env[69927]: DEBUG nova.network.neutron [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.459739] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "820c50b9-3c18-41bc-a000-22425b1dbb27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.459995] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.460421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "820c50b9-3c18-41bc-a000-22425b1dbb27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.460672] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.460878] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.467625] env[69927]: INFO nova.compute.manager [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Terminating instance [ 677.546804] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 677.547105] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.110s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.547426] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.254s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.549084] env[69927]: INFO nova.compute.claims [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.565654] env[69927]: DEBUG oslo_vmware.api [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095391, 'name': PowerOnVM_Task, 'duration_secs': 0.599031} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.565654] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 677.565654] env[69927]: INFO nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Took 16.03 seconds to spawn the instance on the hypervisor. [ 677.565654] env[69927]: DEBUG nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 677.565654] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5015d41-5afc-47d6-96ac-fd6efee520fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.590294] env[69927]: DEBUG nova.network.neutron [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updated VIF entry in instance network info cache for port 67c7df0a-bb67-40ee-9a2d-11cea9dbacb7. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 677.591548] env[69927]: DEBUG nova.network.neutron [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance_info_cache with network_info: [{"id": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "address": "fa:16:3e:89:98:e2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c7df0a-bb", "ovs_interfaceid": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.630788] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095395, 'name': Rename_Task, 'duration_secs': 0.335519} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.631128] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.631402] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df8163f2-ccd7-4e42-8b36-366108f0d32a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.647986] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 677.647986] env[69927]: value = "task-4095398" [ 677.647986] env[69927]: _type = "Task" [ 677.647986] env[69927]: } to complete. 
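
The network_info blob cached above is a list of VIF dicts; the fields the rest of the flow actually consumes are the port id, the MAC address, the fixed IPs nested under network.subnets[].ips[], and the OVS interface name. A short extraction helper over that structure, with field names taken directly from the entry above:

    def summarize_vif(vif: dict) -> dict:
        """Pull the commonly used fields out of one network_info entry."""
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "fixed_ips": fixed_ips,
            "devname": vif.get("devname"),
            "ovs_interfaceid": vif.get("ovs_interfaceid"),
        }

    # For the entry above this yields port 67c7df0a-bb67-40ee-9a2d-11cea9dbacb7,
    # MAC fa:16:3e:89:98:e2 and fixed IP 192.168.233.164.
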
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.651308] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.651500] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.664704] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095398, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.698311] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095396, 'name': Rename_Task, 'duration_secs': 0.41903} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.698623] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.699564] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-602a36b9-2436-4fd7-8b0c-496d1615eaf1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.702966] env[69927]: DEBUG nova.network.neutron [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Updating instance_info_cache with network_info: [{"id": "b112a351-376b-4433-94a9-e8e186f3dff3", "address": "fa:16:3e:b5:a4:e8", "network": {"id": "0344fc7a-5b52-4c14-9a09-fd627f1d3d43", "bridge": "br-int", "label": "tempest-ServersTestJSON-28061948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc73b36a965a4503b546cc90a2950441", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", 
"external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb112a351-37", "ovs_interfaceid": "b112a351-376b-4433-94a9-e8e186f3dff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.707231] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 677.707231] env[69927]: value = "task-4095399" [ 677.707231] env[69927]: _type = "Task" [ 677.707231] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.716729] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.766408] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095397, 'name': ReconfigVM_Task, 'duration_secs': 0.39077} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.766700] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 2cdfda66-1d93-4960-a129-2788f10fa593/2cdfda66-1d93-4960-a129-2788f10fa593.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.767387] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0423e184-3746-4db4-95d1-c9fe08c3d672 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.776212] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 677.776212] env[69927]: value = "task-4095400" [ 677.776212] env[69927]: _type = "Task" [ 677.776212] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.787735] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095400, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.827522] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095394, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588445} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.827786] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9348e368-cc3c-4bde-91ae-26fd03ad536a/9348e368-cc3c-4bde-91ae-26fd03ad536a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.828019] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.828325] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-954e6f31-99be-46a9-966d-afd5eec7ea5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.837365] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 677.837365] env[69927]: value = "task-4095401" [ 677.837365] env[69927]: _type = "Task" [ 677.837365] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.847965] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095401, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.972013] env[69927]: DEBUG nova.compute.manager [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Start destroying the instance on the hypervisor. 
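
The "Extending root virtual disk to 1048576" entries appear to give the target size in KiB (an assumption based on how the driver derives it from the flavor's root_gb); 1048576 KiB is exactly 1 GiB, which matches the small root disks of these tempest flavors. The arithmetic, under that assumption:

    KIB_PER_GIB = 1024 * 1024

    root_gb = 1  # assumed flavor root disk size for these tempest instances
    target_kib = root_gb * KIB_PER_GIB
    print(target_kib)  # 1048576, the value logged alongside ExtendVirtualDisk_Task
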
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 677.972308] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 677.973275] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ab6c4f-ae8b-4589-9194-a619a57b5cd8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.983413] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 677.983579] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bc3e09a-c175-4ff8-a87f-069f7747286d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.991698] env[69927]: DEBUG oslo_vmware.api [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 677.991698] env[69927]: value = "task-4095402" [ 677.991698] env[69927]: _type = "Task" [ 677.991698] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.008210] env[69927]: DEBUG oslo_vmware.api [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.092025] env[69927]: INFO nova.compute.manager [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Took 24.15 seconds to build instance. [ 678.097896] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cc65f82-0baf-489f-a8a8-eb593ddbd15e req-c58e38dd-fef3-405d-9888-e58b21d4a63f service nova] Releasing lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.164827] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095398, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.206173] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Releasing lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.206518] env[69927]: DEBUG nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Instance network_info: |[{"id": "b112a351-376b-4433-94a9-e8e186f3dff3", "address": "fa:16:3e:b5:a4:e8", "network": {"id": "0344fc7a-5b52-4c14-9a09-fd627f1d3d43", "bridge": "br-int", "label": "tempest-ServersTestJSON-28061948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc73b36a965a4503b546cc90a2950441", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb112a351-37", "ovs_interfaceid": "b112a351-376b-4433-94a9-e8e186f3dff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 678.208666] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:a4:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b112a351-376b-4433-94a9-e8e186f3dff3', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 678.217816] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Creating folder: Project (fc73b36a965a4503b546cc90a2950441). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.218267] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02db1aa7-49fe-4c22-806b-f8bce798f642 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.233041] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095399, 'name': PowerOnVM_Task} progress is 1%. 
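
The "Instance VIF info" line shows how each network_info entry is flattened into the dict the VMware driver builds the VM with: bridge name, MAC address, an OpaqueNetwork reference keyed on the NSX logical-switch id, the Neutron port id, and the vmxnet3 NIC model. A rough mapping that produces the same shape from the cached entry (a sketch, not the driver's actual implementation):

    def vmware_vif_info(vif: dict, vif_model: str = "vmxnet3") -> dict:
        """Flatten one network_info entry into the driver's VIF-info dict."""
        details = vif.get("details", {})
        return {
            "network_name": vif["network"]["bridge"],          # e.g. br-int
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],                              # Neutron port id
            "vif_model": vif_model,
        }
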
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.239248] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Created folder: Project (fc73b36a965a4503b546cc90a2950441) in parent group-v811283. [ 678.240707] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Creating folder: Instances. Parent ref: group-v811309. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.240707] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38824c64-17e6-4ae5-b2d0-bd210ec1fc14 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.253051] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Created folder: Instances in parent group-v811309. [ 678.253485] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 678.253773] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 678.253893] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-150186e0-6674-4f70-bef6-68872bfb809b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.277787] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 678.277787] env[69927]: value = "task-4095405" [ 678.277787] env[69927]: _type = "Task" [ 678.277787] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.292474] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095400, 'name': Rename_Task, 'duration_secs': 0.221148} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.297465] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 678.297465] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095405, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.297465] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bb00dce-e11c-47d0-b171-892038d2f0ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.304251] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 678.304251] env[69927]: value = "task-4095406" [ 678.304251] env[69927]: _type = "Task" [ 678.304251] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.315602] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095406, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.348643] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138601} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.349377] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.349878] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e865fc02-cc61-44a7-a9aa-1826285eb5cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.376884] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 9348e368-cc3c-4bde-91ae-26fd03ad536a/9348e368-cc3c-4bde-91ae-26fd03ad536a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 678.376884] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79eea441-a241-4a49-828b-06d6edeaa17d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.400546] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 678.400546] env[69927]: value = "task-4095407" [ 678.400546] env[69927]: _type = "Task" [ 678.400546] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.413425] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.505356] env[69927]: DEBUG oslo_vmware.api [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.593551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-730ad7ac-ee2b-45f2-abab-e595487d424d tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "053f6f00-a818-473b-a887-4ec45174c1d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.668s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.668265] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095398, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.736890] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095399, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.801271] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095405, 'name': CreateVM_Task, 'duration_secs': 0.490857} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.801577] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 678.803480] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.803480] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.803480] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 678.803480] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd8ac4ce-c9aa-42f5-b7f7-be7f10fa8441 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.822843] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095406, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.827328] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 678.827328] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a80295-b950-42f6-b508-5c42a4c40768" [ 678.827328] env[69927]: _type = "Task" [ 678.827328] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.838546] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a80295-b950-42f6-b508-5c42a4c40768, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.912113] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d587e30f-8769-4c01-8939-ddcd07ccd26a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.923084] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095407, 'name': ReconfigVM_Task, 'duration_secs': 0.420953} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.924404] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bf1cb0-8b62-4067-a4a2-e2471c197711 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.928823] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 9348e368-cc3c-4bde-91ae-26fd03ad536a/9348e368-cc3c-4bde-91ae-26fd03ad536a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.929635] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-205058e0-4786-4987-aed1-89f0a36c109b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.970391] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a379fb2a-8a30-4f56-aaa0-2fc43a5ee0cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.973683] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 678.973683] env[69927]: value = "task-4095408" [ 678.973683] env[69927]: _type = "Task" [ 678.973683] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.982055] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b92328f-ae20-47a5-98ff-822e819fa9ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.990790] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095408, 'name': Rename_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.007606] env[69927]: DEBUG nova.compute.provider_tree [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.017836] env[69927]: DEBUG oslo_vmware.api [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095402, 'name': PowerOffVM_Task, 'duration_secs': 0.664518} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.018239] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 679.018594] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 679.018814] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e583941-a0bc-48f4-a398-52750a40521c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.099255] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 679.099491] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 679.099671] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Deleting the datastore file [datastore1] 820c50b9-3c18-41bc-a000-22425b1dbb27 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 679.099970] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1600c55-6913-4a4d-a07d-6291bd26d31e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.102371] env[69927]: DEBUG nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 
tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.111726] env[69927]: DEBUG oslo_vmware.api [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for the task: (returnval){ [ 679.111726] env[69927]: value = "task-4095410" [ 679.111726] env[69927]: _type = "Task" [ 679.111726] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.120590] env[69927]: DEBUG oslo_vmware.api [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.164194] env[69927]: DEBUG oslo_vmware.api [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095398, 'name': PowerOnVM_Task, 'duration_secs': 1.418354} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.164446] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.164744] env[69927]: INFO nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Took 14.23 seconds to spawn the instance on the hypervisor. [ 679.165018] env[69927]: DEBUG nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 679.165965] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c48fba-22bf-4371-bf60-9996b4387bf2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.235703] env[69927]: DEBUG oslo_vmware.api [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095399, 'name': PowerOnVM_Task, 'duration_secs': 1.328268} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.235987] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.236211] env[69927]: INFO nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Took 7.16 seconds to spawn the instance on the hypervisor. [ 679.236399] env[69927]: DEBUG nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 679.237285] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51045d3-f20c-42a6-8451-4426cdd11c9d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.318620] env[69927]: DEBUG oslo_vmware.api [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095406, 'name': PowerOnVM_Task, 'duration_secs': 0.64308} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.318919] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.319116] env[69927]: INFO nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Took 12.01 seconds to spawn the instance on the hypervisor. [ 679.319386] env[69927]: DEBUG nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 679.320189] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a477fe11-e4ea-4a57-996d-c496f72bcfa1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.340198] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a80295-b950-42f6-b508-5c42a4c40768, 'name': SearchDatastore_Task, 'duration_secs': 0.032253} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.340510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.340739] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.340972] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.341130] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.341338] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.341603] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29b2f9c6-9b39-4b20-88f1-d5f53098935a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.353992] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.354295] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 679.355362] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f80c24d4-8817-4c6e-b734-93a67b246329 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.368749] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 679.368749] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5240922c-895b-13f5-09dc-57b2ef70ce6c" [ 679.368749] env[69927]: _type = "Task" [ 679.368749] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.383759] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5240922c-895b-13f5-09dc-57b2ef70ce6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.418264] env[69927]: DEBUG nova.compute.manager [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Received event network-vif-plugged-b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 679.418482] env[69927]: DEBUG oslo_concurrency.lockutils [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] Acquiring lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.418690] env[69927]: DEBUG oslo_concurrency.lockutils [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.418854] env[69927]: DEBUG oslo_concurrency.lockutils [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.419688] env[69927]: DEBUG nova.compute.manager [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] No waiting events found dispatching network-vif-plugged-b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 679.421499] env[69927]: WARNING nova.compute.manager [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Received unexpected event network-vif-plugged-b112a351-376b-4433-94a9-e8e186f3dff3 for instance 
with vm_state building and task_state spawning. [ 679.421797] env[69927]: DEBUG nova.compute.manager [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Received event network-changed-b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 679.422100] env[69927]: DEBUG nova.compute.manager [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Refreshing instance network info cache due to event network-changed-b112a351-376b-4433-94a9-e8e186f3dff3. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 679.422235] env[69927]: DEBUG oslo_concurrency.lockutils [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] Acquiring lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.422323] env[69927]: DEBUG oslo_concurrency.lockutils [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] Acquired lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.422468] env[69927]: DEBUG nova.network.neutron [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Refreshing network info cache for port b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 679.487088] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095408, 'name': Rename_Task, 'duration_secs': 0.300995} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.487915] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.489082] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7c54b2f-bc7c-4551-a434-f261ed4dcf47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.499740] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 679.499740] env[69927]: value = "task-4095411" [ 679.499740] env[69927]: _type = "Task" [ 679.499740] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.512178] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095411, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.516738] env[69927]: DEBUG nova.scheduler.client.report [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.625213] env[69927]: DEBUG oslo_vmware.api [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Task: {'id': task-4095410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.393356} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.625850] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 679.625850] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 679.626306] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.626306] env[69927]: INFO nova.compute.manager [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Took 1.65 seconds to destroy the instance on the hypervisor. [ 679.626927] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 679.626927] env[69927]: DEBUG nova.compute.manager [-] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 679.626927] env[69927]: DEBUG nova.network.neutron [-] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.635266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.686103] env[69927]: INFO nova.compute.manager [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Took 22.79 seconds to build instance. [ 679.758221] env[69927]: INFO nova.compute.manager [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Took 22.04 seconds to build instance. [ 679.848114] env[69927]: INFO nova.compute.manager [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Took 22.77 seconds to build instance. [ 679.884657] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5240922c-895b-13f5-09dc-57b2ef70ce6c, 'name': SearchDatastore_Task, 'duration_secs': 0.021192} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.887608] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6f612dd-9d8a-4edc-a032-3f7cbae9a3f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.896939] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 679.896939] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52060ea3-8a44-e405-b2f1-3313fffe7f41" [ 679.896939] env[69927]: _type = "Task" [ 679.896939] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.912410] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52060ea3-8a44-e405-b2f1-3313fffe7f41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.958494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Acquiring lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.958494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.015451] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095411, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.025066] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.025066] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 680.029434] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.655s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.033732] env[69927]: INFO nova.compute.claims [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.190238] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05effcaf-9d7b-4350-94c4-7ea86a3d4830 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.311s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.260045] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6101ccc-1711-48de-ae1b-3227c10b8c52 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "c45d2259-2a05-49d5-81eb-4c79ced83121" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.550s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.350327] env[69927]: DEBUG oslo_concurrency.lockutils [None req-befbc9ca-5067-40d0-8483-84e0ecfc6e20 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "2cdfda66-1d93-4960-a129-2788f10fa593" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.280s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.412745] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52060ea3-8a44-e405-b2f1-3313fffe7f41, 'name': SearchDatastore_Task, 'duration_secs': 0.015081} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.413031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.415289] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7ce79e41-333a-4ef3-ba68-f74067d4ac5a/7ce79e41-333a-4ef3-ba68-f74067d4ac5a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 680.415289] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d10582ea-58be-45a8-8e8d-669cc1711152 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.422969] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 680.422969] env[69927]: value = "task-4095412" [ 680.422969] env[69927]: _type = "Task" [ 680.422969] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.439959] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095412, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.513426] env[69927]: DEBUG oslo_vmware.api [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095411, 'name': PowerOnVM_Task, 'duration_secs': 0.701483} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.513989] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 680.514353] env[69927]: INFO nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Took 10.88 seconds to spawn the instance on the hypervisor. 
[ 680.514659] env[69927]: DEBUG nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 680.517149] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e7a715-29d7-4f38-a121-7cfe41ee6c81 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.539863] env[69927]: DEBUG nova.compute.utils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 680.543644] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 680.543854] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 680.614693] env[69927]: DEBUG nova.network.neutron [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Updated VIF entry in instance network info cache for port b112a351-376b-4433-94a9-e8e186f3dff3. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 680.614954] env[69927]: DEBUG nova.network.neutron [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Updating instance_info_cache with network_info: [{"id": "b112a351-376b-4433-94a9-e8e186f3dff3", "address": "fa:16:3e:b5:a4:e8", "network": {"id": "0344fc7a-5b52-4c14-9a09-fd627f1d3d43", "bridge": "br-int", "label": "tempest-ServersTestJSON-28061948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc73b36a965a4503b546cc90a2950441", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb112a351-37", "ovs_interfaceid": "b112a351-376b-4433-94a9-e8e186f3dff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.694687] env[69927]: DEBUG nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.764280] env[69927]: DEBUG nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.770613] env[69927]: DEBUG nova.policy [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcdc91b3659746cda7ae7320eeea25bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66dfd282dce84de1b56e8271ff7b0318', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 680.852954] env[69927]: DEBUG nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.939374] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095412, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.049155] env[69927]: INFO nova.compute.manager [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Took 23.64 seconds to build instance. [ 681.053144] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 681.121663] env[69927]: DEBUG oslo_concurrency.lockutils [req-af389196-ec58-4e87-b081-b35c34a35fe3 req-a14570a9-730b-42f4-b23a-f2d5ecc9c773 service nova] Releasing lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.192995] env[69927]: DEBUG nova.network.neutron [-] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.221741] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.256236] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.256236] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.295582] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.387820] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.441240] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095412, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.819901} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.441702] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7ce79e41-333a-4ef3-ba68-f74067d4ac5a/7ce79e41-333a-4ef3-ba68-f74067d4ac5a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 681.444020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 681.444020] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1d351c7-1122-4ebd-afcc-94d06de12778 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.453715] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 681.453715] env[69927]: value = "task-4095413" [ 681.453715] env[69927]: _type = "Task" [ 681.453715] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.468131] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095413, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.470419] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24ad5ca-7de4-4a89-85cb-081c24ded6d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.481305] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9e1655-6345-4f3f-a90e-ee132403f62c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.522339] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed5780b-e13c-44f1-83e5-2b0d9d589f3b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.531866] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2901172f-dbd2-4e6a-9b80-ef53bda47d73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.548013] env[69927]: DEBUG nova.compute.provider_tree [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.564917] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6dd03dbb-75f7-418f-8250-c5990d740aac tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.170s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.696829] env[69927]: INFO nova.compute.manager [-] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Took 2.07 seconds to deallocate network for instance. [ 681.763350] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Successfully created port: 6c93c813-14c8-4f76-918b-6efb59510588 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 681.966558] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095413, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102255} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.967027] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 681.968190] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035c390f-3610-457c-918a-319b8f8bebb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.997261] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 7ce79e41-333a-4ef3-ba68-f74067d4ac5a/7ce79e41-333a-4ef3-ba68-f74067d4ac5a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 681.997763] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a42b2b36-cc6a-4ced-8935-0de14b091c84 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.024625] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 682.024625] env[69927]: value = "task-4095414" [ 682.024625] env[69927]: _type = "Task" [ 682.024625] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.034566] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095414, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.051908] env[69927]: DEBUG nova.scheduler.client.report [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.068093] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 682.071738] env[69927]: DEBUG nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 682.101832] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.101832] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.102207] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.102207] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.102366] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.102518] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.104669] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.104669] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.104669] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.104669] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.104669] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.105270] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b8e94d-970f-4305-a78c-feb64789fb05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.116504] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7e5947-f55a-4690-9111-e021212ba3ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.205471] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.329814] env[69927]: DEBUG nova.compute.manager [None req-5b35fad7-633e-4494-b40f-269e334dc21b tempest-ServerDiagnosticsV248Test-931281013 tempest-ServerDiagnosticsV248Test-931281013-project-admin] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 682.331962] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272e4ab3-c7a4-4fb3-9971-1799f10d14f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.340721] env[69927]: INFO nova.compute.manager [None req-5b35fad7-633e-4494-b40f-269e334dc21b tempest-ServerDiagnosticsV248Test-931281013 tempest-ServerDiagnosticsV248Test-931281013-project-admin] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Retrieving diagnostics [ 682.341882] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a70937-a096-410b-9105-569e530cc931 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.537624] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095414, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.559191] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.562024] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 682.563948] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.138s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.566497] env[69927]: INFO nova.compute.claims [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.612621] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.719229] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Acquiring lock "053f6f00-a818-473b-a887-4ec45174c1d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.719471] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Lock "053f6f00-a818-473b-a887-4ec45174c1d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.719709] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 
tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Acquiring lock "053f6f00-a818-473b-a887-4ec45174c1d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.719898] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Lock "053f6f00-a818-473b-a887-4ec45174c1d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.720173] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Lock "053f6f00-a818-473b-a887-4ec45174c1d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.724369] env[69927]: INFO nova.compute.manager [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Terminating instance [ 683.026594] env[69927]: DEBUG nova.compute.manager [req-34896f4f-561c-4611-a3af-139542f82282 req-09af8276-11f1-4152-b4c4-5cc9cb9c3ef1 service nova] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Received event network-vif-deleted-ea8b5988-95e7-4a5b-b062-03d4d72eb9e5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 683.040778] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095414, 'name': ReconfigVM_Task, 'duration_secs': 0.939287} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.040778] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 7ce79e41-333a-4ef3-ba68-f74067d4ac5a/7ce79e41-333a-4ef3-ba68-f74067d4ac5a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.040778] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5aba2c1-f6ea-4737-827e-0b2afbea136d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.049347] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 683.049347] env[69927]: value = "task-4095415" [ 683.049347] env[69927]: _type = "Task" [ 683.049347] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.063224] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095415, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.074405] env[69927]: DEBUG nova.compute.utils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 683.076034] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 683.076241] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 683.151383] env[69927]: DEBUG nova.policy [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcdc91b3659746cda7ae7320eeea25bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66dfd282dce84de1b56e8271ff7b0318', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 683.232325] env[69927]: DEBUG nova.compute.manager [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 683.232745] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 683.233889] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7dfcd1-7958-4ddf-990a-7a16e6f51d3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.243432] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 683.243725] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eba37bd7-6654-4905-b5c1-8321fe8c20ce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.253047] env[69927]: DEBUG oslo_vmware.api [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Waiting for the task: (returnval){ [ 683.253047] env[69927]: value = "task-4095416" [ 683.253047] env[69927]: _type = "Task" [ 683.253047] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.263280] env[69927]: DEBUG oslo_vmware.api [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Task: {'id': task-4095416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.561102] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095415, 'name': Rename_Task, 'duration_secs': 0.22887} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.561425] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 683.562291] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3c1f72b-7dce-4f70-bba4-b5861bda052d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.572063] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 683.572063] env[69927]: value = "task-4095417" [ 683.572063] env[69927]: _type = "Task" [ 683.572063] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.581495] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.601426] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095417, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.614514] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Successfully created port: 50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.770023] env[69927]: DEBUG oslo_vmware.api [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Task: {'id': task-4095416, 'name': PowerOffVM_Task, 'duration_secs': 0.429222} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.770023] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 683.770023] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 683.770023] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-699d98a9-878e-4a8f-9c45-c48e5a05ff3d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.848296] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 683.848463] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 683.848607] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 
tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Deleting the datastore file [datastore2] 053f6f00-a818-473b-a887-4ec45174c1d5 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.849184] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c94f05f5-a6cd-4023-8ec0-0a1e7e5bb23e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.859276] env[69927]: DEBUG oslo_vmware.api [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Waiting for the task: (returnval){ [ 683.859276] env[69927]: value = "task-4095419" [ 683.859276] env[69927]: _type = "Task" [ 683.859276] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.880048] env[69927]: DEBUG oslo_vmware.api [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Task: {'id': task-4095419, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.982914] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50dd7cf-2d5e-4d98-98b6-c3ce98da0ebe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.993053] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8feae54-e1f2-4862-ba32-a659305f3ac3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.029373] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "c3c36508-96e1-4e75-931b-c7f0740b74e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.029723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.029985] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "c3c36508-96e1-4e75-931b-c7f0740b74e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.030208] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.030384] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.033072] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fff7b8-84c6-4b78-8109-7e1fb3cc7f74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.036301] env[69927]: INFO nova.compute.manager [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Terminating instance [ 684.045528] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccf947d-9a3c-40bb-b258-bdb8268925b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.061279] env[69927]: DEBUG nova.compute.provider_tree [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.083121] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095417, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.370253] env[69927]: DEBUG oslo_vmware.api [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Task: {'id': task-4095419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214027} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.373444] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.373444] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 684.373444] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 684.373444] env[69927]: INFO nova.compute.manager [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 684.373444] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 684.375091] env[69927]: DEBUG nova.compute.manager [-] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 684.375091] env[69927]: DEBUG nova.network.neutron [-] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.542916] env[69927]: DEBUG nova.compute.manager [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 684.543169] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 684.544076] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c7e831-94f0-4a7e-99e1-062082078c5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.553514] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 684.553799] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0f55cdc-51d5-4ef3-822b-223e7a2fadec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.564628] env[69927]: DEBUG nova.scheduler.client.report [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 684.570293] env[69927]: DEBUG oslo_vmware.api [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 684.570293] env[69927]: value = "task-4095420" [ 684.570293] env[69927]: _type = "Task" [ 684.570293] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.588604] env[69927]: DEBUG oslo_vmware.api [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095417, 'name': PowerOnVM_Task, 'duration_secs': 0.727319} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.592046] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 684.592301] env[69927]: INFO nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Took 10.11 seconds to spawn the instance on the hypervisor. [ 684.592486] env[69927]: DEBUG nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 684.593117] env[69927]: DEBUG oslo_vmware.api [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095420, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.593886] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487578c7-0212-43db-a13f-8520361961ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.603967] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.633660] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.633957] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.634184] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.634522] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.634600] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.634941] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.636313] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.636644] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 684.636797] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.636985] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.637391] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.639192] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1110d3-dd6b-4fe0-947f-90f9813a4d2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.649492] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ed4d3a-5a18-4ff9-8aed-d5e9793c5e1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.958075] env[69927]: DEBUG nova.compute.manager [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 684.959015] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f178f40-7f4f-41fb-9ece-e8781a24cdc5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.076025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.076025] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 685.077423] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.437s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.078758] env[69927]: INFO nova.compute.claims [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.096065] env[69927]: DEBUG oslo_vmware.api [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095420, 'name': PowerOffVM_Task, 'duration_secs': 0.223712} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.096395] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 685.096603] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 685.096916] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28db50ca-34c9-40b6-9ad4-5d740f3d6c16 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.130293] env[69927]: INFO nova.compute.manager [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Took 24.99 seconds to build instance. 
[ 685.353100] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 685.353651] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 685.353992] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Deleting the datastore file [datastore2] c3c36508-96e1-4e75-931b-c7f0740b74e1 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.354150] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60475727-b75a-4d26-ba78-f5dbbb9d51ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.364963] env[69927]: DEBUG oslo_vmware.api [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for the task: (returnval){ [ 685.364963] env[69927]: value = "task-4095422" [ 685.364963] env[69927]: _type = "Task" [ 685.364963] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.375674] env[69927]: DEBUG oslo_vmware.api [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.415205] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Successfully updated port: 50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 685.438679] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Successfully updated port: 6c93c813-14c8-4f76-918b-6efb59510588 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 685.471870] env[69927]: INFO nova.compute.manager [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] instance snapshotting [ 685.477289] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6b694c-c52c-403a-9578-967a200653b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.504683] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0150346-b11f-4ce5-bd79-66c4431fe88d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.592533] env[69927]: DEBUG nova.compute.utils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 685.594058] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 685.594278] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 685.632410] env[69927]: DEBUG oslo_concurrency.lockutils [None req-195fe0ef-e0de-44b1-97c7-14f3f1626442 tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.507s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.877610] env[69927]: DEBUG oslo_vmware.api [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Task: {'id': task-4095422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226619} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.877902] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.878115] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 685.878329] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 685.878562] env[69927]: INFO nova.compute.manager [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Took 1.34 seconds to destroy the instance on the hypervisor. [ 685.878860] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.879066] env[69927]: DEBUG nova.compute.manager [-] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 685.879184] env[69927]: DEBUG nova.network.neutron [-] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 685.900140] env[69927]: DEBUG nova.policy [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcdc91b3659746cda7ae7320eeea25bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66dfd282dce84de1b56e8271ff7b0318', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 685.920048] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "refresh_cache-ab8a8acc-cab7-4a82-bd90-b34147f17b0e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.920266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "refresh_cache-ab8a8acc-cab7-4a82-bd90-b34147f17b0e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.920457] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.942594] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "refresh_cache-4bf59fae-8029-421b-95fd-a0d008891ce7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.942757] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "refresh_cache-4bf59fae-8029-421b-95fd-a0d008891ce7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.942912] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Building network info cache for instance 
{{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 686.016893] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 686.017260] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3e5b8722-1f1d-402b-b481-5770963ed565 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.035514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.035514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.035514] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 686.035514] env[69927]: value = "task-4095423" [ 686.035514] env[69927]: _type = "Task" [ 686.035514] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.046806] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095423, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.099723] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 686.135780] env[69927]: DEBUG nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 686.343435] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "4ad26720-ed24-4963-9519-3345dbfeb9a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.344952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "4ad26720-ed24-4963-9519-3345dbfeb9a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.344952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "4ad26720-ed24-4963-9519-3345dbfeb9a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.345748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "4ad26720-ed24-4963-9519-3345dbfeb9a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.345748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "4ad26720-ed24-4963-9519-3345dbfeb9a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.350073] env[69927]: INFO nova.compute.manager [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Terminating instance [ 686.396710] env[69927]: DEBUG nova.network.neutron [-] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.480632] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e902e4b-332c-4f89-a0f5-17ea946950de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.491763] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadf965a-b732-4e5f-86cb-a30acb1ad282 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.533906] env[69927]: DEBUG nova.network.neutron [None 
req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.537169] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd013864-c75c-497f-8145-20be4a3a8078 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.550521] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095423, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.551925] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad0dbac-88d2-408f-96a2-70a393b00fb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.556519] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.572347] env[69927]: DEBUG nova.compute.provider_tree [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.670266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.855758] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "refresh_cache-4ad26720-ed24-4963-9519-3345dbfeb9a2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.856141] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquired lock "refresh_cache-4ad26720-ed24-4963-9519-3345dbfeb9a2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.856250] env[69927]: DEBUG nova.network.neutron [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 686.866470] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Updating instance_info_cache with network_info: [{"id": "6c93c813-14c8-4f76-918b-6efb59510588", "address": "fa:16:3e:40:bc:e9", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c93c813-14", "ovs_interfaceid": "6c93c813-14c8-4f76-918b-6efb59510588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.898589] env[69927]: INFO nova.compute.manager [-] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Took 2.53 seconds to deallocate network for instance. [ 687.057632] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095423, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.078022] env[69927]: DEBUG nova.scheduler.client.report [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 687.112997] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 687.119963] env[69927]: DEBUG nova.network.neutron [-] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.160913] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.161128] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.161392] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.161831] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.161912] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.162309] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.162309] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 687.367627] env[69927]: DEBUG nova.virt.hardware [None 
req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.367627] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.367627] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.367627] env[69927]: DEBUG nova.virt.hardware [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.367627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01fd806-70bf-4a99-80ab-45bf3cac3633 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.367627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43dbe983-6de3-4788-b49b-cea6b2fd5c40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.369377] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Updating instance_info_cache with network_info: [{"id": "50ea3bc5-fa5d-49db-99d9-e842cb85c0d8", "address": "fa:16:3e:38:e6:82", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50ea3bc5-fa", "ovs_interfaceid": "50ea3bc5-fa5d-49db-99d9-e842cb85c0d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.369377] env[69927]: DEBUG nova.network.neutron [None 
req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Successfully created port: 5aeee040-2531-4d68-871a-2f65a93ad448 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.371352] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "refresh_cache-4bf59fae-8029-421b-95fd-a0d008891ce7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.371658] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Instance network_info: |[{"id": "6c93c813-14c8-4f76-918b-6efb59510588", "address": "fa:16:3e:40:bc:e9", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c93c813-14", "ovs_interfaceid": "6c93c813-14c8-4f76-918b-6efb59510588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 687.372157] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:bc:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c93c813-14c8-4f76-918b-6efb59510588', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.382679] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Creating folder: Project (66dfd282dce84de1b56e8271ff7b0318). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.382679] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a62c438-93bb-4c56-bda3-67389c6611cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.396715] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Created folder: Project (66dfd282dce84de1b56e8271ff7b0318) in parent group-v811283. [ 687.396715] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Creating folder: Instances. Parent ref: group-v811313. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.396715] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3d9dbdc-9668-4bd9-a984-6b6442bce503 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.406434] env[69927]: DEBUG nova.network.neutron [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.410511] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.413118] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Created folder: Instances in parent group-v811313. [ 687.413443] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.413603] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.413808] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff511b69-cb79-4710-969a-c7c1877a216b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.454809] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.454809] env[69927]: value = "task-4095426" [ 687.454809] env[69927]: _type = "Task" [ 687.454809] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.469443] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095426, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.556028] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095423, 'name': CreateSnapshot_Task, 'duration_secs': 1.033941} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.556028] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 687.556028] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04de5b88-d3a2-4a73-8cce-4ae3e49a084e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.585657] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.586891] env[69927]: DEBUG nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 687.591693] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.956s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.593540] env[69927]: INFO nova.compute.claims [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.621054] env[69927]: INFO nova.compute.manager [-] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Took 1.74 seconds to deallocate network for instance. 
[ 687.689670] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "refresh_cache-ab8a8acc-cab7-4a82-bd90-b34147f17b0e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.690047] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Instance network_info: |[{"id": "50ea3bc5-fa5d-49db-99d9-e842cb85c0d8", "address": "fa:16:3e:38:e6:82", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50ea3bc5-fa", "ovs_interfaceid": "50ea3bc5-fa5d-49db-99d9-e842cb85c0d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 687.690437] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:e6:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50ea3bc5-fa5d-49db-99d9-e842cb85c0d8', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.699161] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.699825] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.700116] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b2bb76a-7bfc-4f32-a3e0-0f53a19a961e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.723615] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.723615] env[69927]: value = "task-4095427" [ 687.723615] env[69927]: _type = "Task" [ 687.723615] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.738424] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095427, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.823398] env[69927]: DEBUG nova.network.neutron [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.951130] env[69927]: INFO nova.compute.manager [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Rebuilding instance [ 687.968775] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095426, 'name': CreateVM_Task, 'duration_secs': 0.415069} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.971303] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 687.972232] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.972332] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.972604] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 687.972983] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-617c819b-f5ad-40d3-aab9-df01e69c5b39 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.981585] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 687.981585] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5212f974-fa5e-f127-aa02-8f160bffea6d" [ 687.981585] env[69927]: _type = "Task" [ 687.981585] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.001744] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5212f974-fa5e-f127-aa02-8f160bffea6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.031174] env[69927]: DEBUG nova.compute.manager [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 688.031791] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17662c9b-0d8f-40ad-ace8-087f2030171f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.083642] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 688.084777] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f99d6d7c-2ea6-4819-a3cd-ee20a4372600 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.099153] env[69927]: DEBUG nova.compute.utils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 688.103078] env[69927]: DEBUG nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 688.103341] env[69927]: DEBUG nova.network.neutron [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 688.108067] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 688.108067] env[69927]: value = "task-4095428" [ 688.108067] env[69927]: _type = "Task" [ 688.108067] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.122019] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095428, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.128478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.232763] env[69927]: DEBUG nova.policy [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3bc9f893fe1f4e109358d3388b3e97fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfeb8b05cf5c459587843b3bdf0e1f23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 688.243463] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095427, 'name': CreateVM_Task, 'duration_secs': 0.477135} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.243463] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.244149] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.327676] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Releasing lock "refresh_cache-4ad26720-ed24-4963-9519-3345dbfeb9a2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.328763] env[69927]: DEBUG nova.compute.manager [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 688.329042] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.330632] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d1c3ca-3e12-4675-9553-652167632f4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.341721] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 688.342128] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c23403b9-946f-4dcb-8ba2-c64eac836b21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.353650] env[69927]: DEBUG oslo_vmware.api [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 688.353650] env[69927]: value = "task-4095429" [ 688.353650] env[69927]: _type = "Task" [ 688.353650] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.362678] env[69927]: DEBUG oslo_vmware.api [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095429, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.455145] env[69927]: DEBUG nova.compute.manager [req-4e8c5e4a-2fdb-4570-9179-744c3c1db9b5 req-8d7ddaba-b4ab-4da4-85e2-9dff42cbfd56 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Received event network-vif-plugged-6c93c813-14c8-4f76-918b-6efb59510588 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 688.455369] env[69927]: DEBUG oslo_concurrency.lockutils [req-4e8c5e4a-2fdb-4570-9179-744c3c1db9b5 req-8d7ddaba-b4ab-4da4-85e2-9dff42cbfd56 service nova] Acquiring lock "4bf59fae-8029-421b-95fd-a0d008891ce7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.455651] env[69927]: DEBUG oslo_concurrency.lockutils [req-4e8c5e4a-2fdb-4570-9179-744c3c1db9b5 req-8d7ddaba-b4ab-4da4-85e2-9dff42cbfd56 service nova] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.455787] env[69927]: DEBUG oslo_concurrency.lockutils [req-4e8c5e4a-2fdb-4570-9179-744c3c1db9b5 req-8d7ddaba-b4ab-4da4-85e2-9dff42cbfd56 service nova] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.455937] env[69927]: DEBUG nova.compute.manager [req-4e8c5e4a-2fdb-4570-9179-744c3c1db9b5 req-8d7ddaba-b4ab-4da4-85e2-9dff42cbfd56 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] No waiting events found dispatching network-vif-plugged-6c93c813-14c8-4f76-918b-6efb59510588 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 688.457055] env[69927]: WARNING nova.compute.manager [req-4e8c5e4a-2fdb-4570-9179-744c3c1db9b5 req-8d7ddaba-b4ab-4da4-85e2-9dff42cbfd56 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Received unexpected event network-vif-plugged-6c93c813-14c8-4f76-918b-6efb59510588 for instance with vm_state building and task_state spawning. [ 688.498541] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5212f974-fa5e-f127-aa02-8f160bffea6d, 'name': SearchDatastore_Task, 'duration_secs': 0.021488} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.498986] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.500578] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.500578] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.500578] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.500578] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 688.501267] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.501800] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 688.503670] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc9ef0d8-adea-4b6b-a2b7-335792a04b3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.504584] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5726e512-f89e-45b6-980d-9c7b56c58044 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.513435] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 688.513435] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52386c75-9205-2db7-ff98-5191f4ae1703" [ 688.513435] env[69927]: _type = "Task" [ 688.513435] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.518752] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 688.520205] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 688.521452] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab0f5fdf-c5a6-405e-804a-9dadce279639 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.529493] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52386c75-9205-2db7-ff98-5191f4ae1703, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.533769] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 688.533769] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bdbacf-a363-1b82-d2a7-ac0796bfb2f3" [ 688.533769] env[69927]: _type = "Task" [ 688.533769] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.545855] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bdbacf-a363-1b82-d2a7-ac0796bfb2f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.613201] env[69927]: DEBUG nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 688.633938] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095428, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.765719] env[69927]: DEBUG nova.compute.manager [req-44429628-9b8d-4386-810f-7e8078a2d7a0 req-c4fd4f88-3889-4eec-9c1a-587fccf4f0ed service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Received event network-vif-plugged-50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 688.766140] env[69927]: DEBUG oslo_concurrency.lockutils [req-44429628-9b8d-4386-810f-7e8078a2d7a0 req-c4fd4f88-3889-4eec-9c1a-587fccf4f0ed service nova] Acquiring lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.766365] env[69927]: DEBUG oslo_concurrency.lockutils [req-44429628-9b8d-4386-810f-7e8078a2d7a0 req-c4fd4f88-3889-4eec-9c1a-587fccf4f0ed service nova] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.766676] env[69927]: DEBUG oslo_concurrency.lockutils [req-44429628-9b8d-4386-810f-7e8078a2d7a0 req-c4fd4f88-3889-4eec-9c1a-587fccf4f0ed service nova] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.766884] env[69927]: DEBUG nova.compute.manager [req-44429628-9b8d-4386-810f-7e8078a2d7a0 req-c4fd4f88-3889-4eec-9c1a-587fccf4f0ed service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] No waiting events found dispatching network-vif-plugged-50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 688.767093] env[69927]: WARNING nova.compute.manager [req-44429628-9b8d-4386-810f-7e8078a2d7a0 req-c4fd4f88-3889-4eec-9c1a-587fccf4f0ed service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Received unexpected event network-vif-plugged-50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 for instance with vm_state building and task_state spawning. [ 688.809445] env[69927]: DEBUG nova.network.neutron [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Successfully created port: 34f427e8-f150-431f-960f-584ea344457c {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.867184] env[69927]: DEBUG oslo_vmware.api [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095429, 'name': PowerOffVM_Task, 'duration_secs': 0.152999} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.871807] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.872093] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.872989] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-203e26ca-f306-4799-b65b-b4fa8866ac11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.883479] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.883479] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.914084] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.917753] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.917753] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Deleting the datastore file [datastore1] 4ad26720-ed24-4963-9519-3345dbfeb9a2 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.917753] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c05c6aed-8df1-45bc-9f6f-72404f1a4dc5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.929030] env[69927]: DEBUG oslo_vmware.api [None req-59899d96-783a-472d-ab55-86bb580bd7fb 
tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for the task: (returnval){ [ 688.929030] env[69927]: value = "task-4095431" [ 688.929030] env[69927]: _type = "Task" [ 688.929030] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.940654] env[69927]: DEBUG oslo_vmware.api [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095431, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.029465] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52386c75-9205-2db7-ff98-5191f4ae1703, 'name': SearchDatastore_Task, 'duration_secs': 0.020868} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.032162] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.032394] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.032603] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.053056] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bdbacf-a363-1b82-d2a7-ac0796bfb2f3, 'name': SearchDatastore_Task, 'duration_secs': 0.010306} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.053585] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.054765] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91743eb6-d91e-4c73-93a3-a020b06ae248 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.057478] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-213d5f49-3eb6-4ef5-b4de-21e58bd2bea2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.068807] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 689.068807] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52eaa36e-f232-7995-66fa-59778675d39b" [ 689.068807] env[69927]: _type = "Task" [ 689.068807] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.076688] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 689.076688] env[69927]: value = "task-4095432" [ 689.076688] env[69927]: _type = "Task" [ 689.076688] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.087657] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52eaa36e-f232-7995-66fa-59778675d39b, 'name': SearchDatastore_Task, 'duration_secs': 0.010622} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.088586] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.089047] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 4bf59fae-8029-421b-95fd-a0d008891ce7/4bf59fae-8029-421b-95fd-a0d008891ce7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 689.089228] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.090923] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 689.090923] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9aef79e-4310-4d7b-af3f-9142bcb261ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.095293] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a651a6b-4cb3-41db-8e13-c8c0d99fe834 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.099428] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.106530] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 689.106530] env[69927]: value = "task-4095433" [ 689.106530] env[69927]: _type = "Task" [ 689.106530] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.113974] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 689.115033] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 689.119855] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e84ef3c-a677-459f-911f-40e26b270041 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.138979] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095433, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.143453] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 689.143453] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5250d98b-6c10-e137-6e48-1e1abc0dd09a" [ 689.143453] env[69927]: _type = "Task" [ 689.143453] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.148321] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095428, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.158747] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5250d98b-6c10-e137-6e48-1e1abc0dd09a, 'name': SearchDatastore_Task, 'duration_secs': 0.011262} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.163075] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36f7f450-b616-412d-acd8-39a9c70c9300 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.173088] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 689.173088] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527177e3-1a71-f77d-42ab-7883ba71e805" [ 689.173088] env[69927]: _type = "Task" [ 689.173088] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.184371] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527177e3-1a71-f77d-42ab-7883ba71e805, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.231435] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c93212-41e9-42cc-9960-c36378891023 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.240902] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d70fd4-fd52-42f8-86ac-9e10bf95b5c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.296541] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3849d4e-8368-4347-9b9e-d98d44328d54 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.308316] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d49efb6-3aaa-4de5-ac59-fb20a148f7d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.323859] env[69927]: DEBUG nova.compute.provider_tree [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.441885] env[69927]: DEBUG oslo_vmware.api [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Task: {'id': task-4095431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106563} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.442097] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 689.442393] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 689.442758] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 689.442972] env[69927]: INFO nova.compute.manager [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 689.443249] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.443478] env[69927]: DEBUG nova.compute.manager [-] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 689.443574] env[69927]: DEBUG nova.network.neutron [-] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.476067] env[69927]: DEBUG nova.network.neutron [-] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.590403] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095432, 'name': PowerOffVM_Task, 'duration_secs': 0.478181} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.590776] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 689.591283] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 689.592070] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bf8f00-e948-44fb-b9a1-f2ccc03f14bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.602323] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 689.602834] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00307146-d2b3-4068-89d0-59f0b147b2f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.620073] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095433, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.637231] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095428, 'name': CloneVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.645435] env[69927]: DEBUG nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 689.651998] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 689.652913] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 689.652913] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Deleting the datastore file [datastore2] c45d2259-2a05-49d5-81eb-4c79ced83121 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 689.653232] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-392dd86e-336b-410e-86e1-2369b0efb978 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.664522] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 689.664522] env[69927]: value = "task-4095435" [ 689.664522] env[69927]: _type = "Task" [ 689.664522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.675965] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095435, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.689296] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527177e3-1a71-f77d-42ab-7883ba71e805, 'name': SearchDatastore_Task, 'duration_secs': 0.012464} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.691752] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 689.692027] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.692186] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 689.692370] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.692516] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 689.692660] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 689.692868] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 689.693033] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 689.693205] env[69927]: DEBUG nova.virt.hardware [None 
req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 689.693424] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 689.693607] env[69927]: DEBUG nova.virt.hardware [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 689.695475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.695475] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ab8a8acc-cab7-4a82-bd90-b34147f17b0e/ab8a8acc-cab7-4a82-bd90-b34147f17b0e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 689.695475] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bec5b4-b7cd-4f87-aca3-d7443738074b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.697906] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bff20963-e494-4ed3-9c5d-8683e7b5624c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.708015] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4b248c-e9e7-41c3-9106-e328412830b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.712652] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 689.712652] env[69927]: value = "task-4095436" [ 689.712652] env[69927]: _type = "Task" [ 689.712652] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.730763] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095436, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.827636] env[69927]: DEBUG nova.scheduler.client.report [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.953012] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "e8e80be6-a82f-4cc5-92fd-366badf519b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.953470] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "e8e80be6-a82f-4cc5-92fd-366badf519b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.981308] env[69927]: DEBUG nova.network.neutron [-] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.123737] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095433, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56092} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.123737] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 4bf59fae-8029-421b-95fd-a0d008891ce7/4bf59fae-8029-421b-95fd-a0d008891ce7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 690.123987] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 690.124231] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f77b03fe-a465-4c0a-8738-702efb34d364 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.139505] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095428, 'name': CloneVM_Task, 'duration_secs': 1.715525} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.142140] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Created linked-clone VM from snapshot [ 690.142619] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 690.142619] env[69927]: value = "task-4095437" [ 690.142619] env[69927]: _type = "Task" [ 690.142619] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.144021] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80277346-df37-45ad-a6aa-7084f604a1a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.162997] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Uploading image 35caae73-182c-41ae-bee8-8a7014804e5e {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 690.169011] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095437, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.190313] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253032} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.190632] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 690.190819] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 690.190993] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.212561] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 690.212561] env[69927]: value = "vm-811317" [ 690.212561] env[69927]: _type = "VirtualMachine" [ 690.212561] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 690.213863] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2b185356-892d-451a-8fbb-d569cbd537b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.232197] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095436, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.235516] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lease: (returnval){ [ 690.235516] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523484a1-c881-7cbb-974a-b33c42410711" [ 690.235516] env[69927]: _type = "HttpNfcLease" [ 690.235516] env[69927]: } obtained for exporting VM: (result){ [ 690.235516] env[69927]: value = "vm-811317" [ 690.235516] env[69927]: _type = "VirtualMachine" [ 690.235516] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 690.235516] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the lease: (returnval){ [ 690.235516] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523484a1-c881-7cbb-974a-b33c42410711" [ 690.235516] env[69927]: _type = "HttpNfcLease" [ 690.235516] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 690.249746] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 690.249746] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523484a1-c881-7cbb-974a-b33c42410711" [ 690.249746] env[69927]: _type = "HttpNfcLease" [ 690.249746] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 690.273389] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Successfully updated port: 5aeee040-2531-4d68-871a-2f65a93ad448 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 690.333742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.334644] env[69927]: DEBUG nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 690.337597] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.116s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.339297] env[69927]: INFO nova.compute.claims [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.484312] env[69927]: INFO nova.compute.manager [-] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Took 1.04 seconds to deallocate network for instance. [ 690.666999] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095437, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1296} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.666999] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 690.667470] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72181eba-8bf4-4a0b-bd34-9d4ef2c0eae0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.698535] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 4bf59fae-8029-421b-95fd-a0d008891ce7/4bf59fae-8029-421b-95fd-a0d008891ce7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 690.703837] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b9884e6-6ea2-4583-bdea-abc3ea535cdd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.731977] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640538} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.733417] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ab8a8acc-cab7-4a82-bd90-b34147f17b0e/ab8a8acc-cab7-4a82-bd90-b34147f17b0e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 690.733760] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 690.736680] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 690.736680] env[69927]: value = "task-4095439" [ 690.736680] env[69927]: _type = "Task" [ 690.736680] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.736680] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bc0637d-dbaf-48f9-9c19-3548ea69e0c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.750430] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095439, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.754347] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 690.754347] env[69927]: value = "task-4095440" [ 690.754347] env[69927]: _type = "Task" [ 690.754347] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.754664] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 690.754664] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523484a1-c881-7cbb-974a-b33c42410711" [ 690.754664] env[69927]: _type = "HttpNfcLease" [ 690.754664] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 690.755268] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 690.755268] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523484a1-c881-7cbb-974a-b33c42410711" [ 690.755268] env[69927]: _type = "HttpNfcLease" [ 690.755268] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 690.760301] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be02b33-9c37-47f7-8ddf-3c10f572102c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.772212] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5e7a1-0a94-682b-4e3d-6232c254270e/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 690.772581] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5e7a1-0a94-682b-4e3d-6232c254270e/disk-0.vmdk for reading. 
{{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 690.780266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "refresh_cache-b1bcbcfb-2320-434c-901f-0f6a476a3069" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.780266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "refresh_cache-b1bcbcfb-2320-434c-901f-0f6a476a3069" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.780266] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 690.780266] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095440, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.846465] env[69927]: DEBUG nova.compute.utils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 690.852815] env[69927]: DEBUG nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 690.852815] env[69927]: DEBUG nova.network.neutron [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 690.938826] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fff99f61-a04b-45c0-89c4-e9fdcca282c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.947890] env[69927]: DEBUG nova.policy [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af0600578c9343b9acfc437510074fcc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '179addc4042d4c65b15c008132e74bf6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 690.994794] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.124476] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.124714] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.249861] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095439, 'name': ReconfigVM_Task, 'duration_secs': 0.326806} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.250228] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 4bf59fae-8029-421b-95fd-a0d008891ce7/4bf59fae-8029-421b-95fd-a0d008891ce7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.250875] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd613e44-cfdb-4839-a593-baa43462c90b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.267280] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 691.267280] env[69927]: value = "task-4095441" [ 691.267280] env[69927]: _type = "Task" [ 691.267280] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.318857] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074212} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.325192] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 691.325423] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.325577] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 691.326243] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 
tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.326505] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 691.326748] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 691.327067] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 691.327243] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 691.327416] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 691.327590] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 691.327789] env[69927]: DEBUG nova.virt.hardware [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 691.328573] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 691.331245] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d71606f-3873-4f41-9957-2fc2d4efe8a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.337066] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66427b5-dd59-4082-9c64-fcfa33275eb1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.344243] env[69927]: DEBUG oslo_vmware.api [None 
req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095441, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.364531] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cf9f0c-e78b-46b1-a806-7b99e824fe4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.370293] env[69927]: DEBUG nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 691.391239] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] ab8a8acc-cab7-4a82-bd90-b34147f17b0e/ab8a8acc-cab7-4a82-bd90-b34147f17b0e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 691.394018] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cef65da9-250b-4dab-a912-8d25f3b60cf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.440347] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.447560] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 691.453114] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 691.453987] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 691.453987] env[69927]: value = "task-4095442" [ 691.453987] env[69927]: _type = "Task" [ 691.453987] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.454273] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18b02947-b9db-4623-b519-d022d2087bd0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.481043] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.483192] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.483628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.499043] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095442, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.499827] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.499827] env[69927]: value = "task-4095443" [ 691.499827] env[69927]: _type = "Task" [ 691.499827] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.521381] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095443, 'name': CreateVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.745021] env[69927]: DEBUG nova.compute.manager [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 691.816539] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095441, 'name': Rename_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.963616] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "14359034-232d-478f-bf65-cf9937c59229" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.964289] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "14359034-232d-478f-bf65-cf9937c59229" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.987069] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095442, 'name': ReconfigVM_Task, 'duration_secs': 0.392811} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.987445] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Reconfigured VM instance instance-0000000a to attach disk [datastore1] ab8a8acc-cab7-4a82-bd90-b34147f17b0e/ab8a8acc-cab7-4a82-bd90-b34147f17b0e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.988477] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-222a6968-e8cf-4651-a1b5-4bcd8627f371 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.000257] env[69927]: DEBUG nova.network.neutron [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Successfully created port: 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 692.008452] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 692.008452] env[69927]: value = "task-4095444" [ 692.008452] env[69927]: _type = "Task" [ 692.008452] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.019439] env[69927]: DEBUG nova.network.neutron [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Successfully updated port: 34f427e8-f150-431f-960f-584ea344457c {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 692.043067] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095444, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.044286] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095443, 'name': CreateVM_Task, 'duration_secs': 0.353718} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.050356] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 692.050356] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.050356] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.050356] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 692.050356] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb7d345a-60fb-4620-b853-160aa2738c12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.054658] env[69927]: DEBUG nova.network.neutron [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Updating instance_info_cache with network_info: [{"id": "5aeee040-2531-4d68-871a-2f65a93ad448", "address": "fa:16:3e:40:b6:6c", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aeee040-25", "ovs_interfaceid": "5aeee040-2531-4d68-871a-2f65a93ad448", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.057344] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 692.057344] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52961f1f-4994-171f-9efa-01d4bf412371" [ 692.057344] env[69927]: _type = "Task" [ 692.057344] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.069173] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52961f1f-4994-171f-9efa-01d4bf412371, 'name': SearchDatastore_Task, 'duration_secs': 0.010553} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.070600] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.071254] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.071254] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.071254] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.071581] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.072257] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6460548-ad69-42e3-b209-74b32076eaf6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.075804] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51e98ebc-e76d-45ed-8cdb-f6cf16c2a120 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.084232] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1f1e68-5b3b-4650-a258-87b1f5a1a945 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.089098] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.089288] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.091387] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4912539-9196-426c-a773-0444d4f9dce5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.126974] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d01b46c-c54e-4a2c-a23d-4add38ae2f6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.131634] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 692.131634] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aefe34-8af2-0725-11ee-2f397a9af046" [ 692.131634] env[69927]: _type = "Task" [ 692.131634] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.139700] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39de696-83a4-4289-958b-fead459269d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.147507] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aefe34-8af2-0725-11ee-2f397a9af046, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.160988] env[69927]: DEBUG nova.compute.provider_tree [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.265638] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.305078] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095441, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.401485] env[69927]: DEBUG nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 692.431691] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 692.432030] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.432119] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 692.432381] env[69927]: DEBUG nova.virt.hardware [None 
req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.432781] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 692.433044] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 692.433406] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 692.433565] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 692.433767] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 692.434195] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 692.434516] env[69927]: DEBUG nova.virt.hardware [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 692.435865] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1b1e8c-85a1-488c-b5f0-32c49230ea50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.446128] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c738b76e-4494-4b71-8350-7ff10b16bee1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.526246] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "refresh_cache-8edafb98-331a-45b8-8de8-4ba04b035ffd" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.526476] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquired lock "refresh_cache-8edafb98-331a-45b8-8de8-4ba04b035ffd" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.526543] env[69927]: DEBUG nova.network.neutron [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.528565] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095444, 'name': Rename_Task, 'duration_secs': 0.170387} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.529122] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 692.529360] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86423974-8c1a-4575-af59-13e7dde231e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.538630] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 692.538630] env[69927]: value = "task-4095445" [ 692.538630] env[69927]: _type = "Task" [ 692.538630] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.552684] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095445, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.558974] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "refresh_cache-b1bcbcfb-2320-434c-901f-0f6a476a3069" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.559326] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Instance network_info: |[{"id": "5aeee040-2531-4d68-871a-2f65a93ad448", "address": "fa:16:3e:40:b6:6c", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aeee040-25", "ovs_interfaceid": "5aeee040-2531-4d68-871a-2f65a93ad448", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 692.559739] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:b6:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5aeee040-2531-4d68-871a-2f65a93ad448', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 692.568972] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 692.569938] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 692.570326] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f17ac613-e031-4472-91d7-708a6dda7c2e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.596386] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 692.596386] env[69927]: value = "task-4095446" [ 692.596386] env[69927]: _type = "Task" [ 692.596386] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.613748] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095446, 'name': CreateVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.642997] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aefe34-8af2-0725-11ee-2f397a9af046, 'name': SearchDatastore_Task, 'duration_secs': 0.013765} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.644116] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46b86b7c-6d83-41ad-b533-50b39a11c1fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.652113] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 692.652113] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c9337b-0d89-f934-e98d-c4142aa3b1d4" [ 692.652113] env[69927]: _type = "Task" [ 692.652113] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.661747] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c9337b-0d89-f934-e98d-c4142aa3b1d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.665179] env[69927]: DEBUG nova.scheduler.client.report [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 692.805658] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095441, 'name': Rename_Task, 'duration_secs': 1.159523} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.805964] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 692.806222] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8d20bee-c714-4f40-85e2-716200badd5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.814892] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 692.814892] env[69927]: value = "task-4095447" [ 692.814892] env[69927]: _type = "Task" [ 692.814892] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.826412] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095447, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.052653] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095445, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.112637] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095446, 'name': CreateVM_Task, 'duration_secs': 0.476997} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.113700] env[69927]: DEBUG nova.network.neutron [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.116699] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.117192] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.120225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.120225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 693.120225] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30a3e41b-7c58-42e1-9d76-f7672a506692 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.124588] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 693.124588] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52553c2a-591f-9401-274e-a03dc4c75e9a" [ 693.124588] env[69927]: _type = "Task" [ 693.124588] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.135196] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52553c2a-591f-9401-274e-a03dc4c75e9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.167982] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c9337b-0d89-f934-e98d-c4142aa3b1d4, 'name': SearchDatastore_Task, 'duration_secs': 0.013685} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.168644] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.168644] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 693.170240] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fed30b1-825c-4ca0-96b4-08c49bbe36ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.171859] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.834s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.172359] env[69927]: DEBUG nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 693.175079] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.880s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.177920] env[69927]: INFO nova.compute.claims [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.186800] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 693.186800] env[69927]: value = "task-4095448" [ 693.186800] env[69927]: _type = "Task" [ 693.186800] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.198496] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.334138] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095447, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.455766] env[69927]: DEBUG nova.network.neutron [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Updating instance_info_cache with network_info: [{"id": "34f427e8-f150-431f-960f-584ea344457c", "address": "fa:16:3e:d5:38:60", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34f427e8-f1", "ovs_interfaceid": "34f427e8-f150-431f-960f-584ea344457c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.550021] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095445, 'name': PowerOnVM_Task, 'duration_secs': 0.56213} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.550491] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 693.550782] env[69927]: INFO nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Took 8.95 seconds to spawn the instance on the hypervisor. 
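Editor's note: the power-on sequence ending above (Invoking VirtualMachine.PowerOnVM_Task, the repeated "progress is 0% ... 100%" polls from _poll_task, then "Powered on the VM") is oslo.vmware's standard invoke-then-wait pattern that recurs throughout this log for ReconfigVM_Task, CreateVM_Task, CopyVirtualDisk_Task and the rest. A minimal sketch of that pattern follows; the vCenter host, credentials, and the VM managed-object value are hypothetical placeholders and are not taken from this log, and this is an illustrative use of the public oslo.vmware API, not a reproduction of Nova's vm_util code.

# Sketch only: mirrors the PowerOnVM_Task / wait_for_task flow logged above.
# Host, credentials, retry count, poll interval and the moref value are
# placeholders chosen for illustration.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Positional args: host, username, password, api_retry_count, task_poll_interval.
session = vmware_api.VMwareAPISession(
    'vc.example.invalid',      # placeholder vCenter host
    'user@vsphere.local',      # placeholder username
    'secret',                  # placeholder password
    10,                        # API retry count
    0.5)                       # task poll interval in seconds (cf. _poll_task)

# Build a managed object reference for an existing VM (placeholder value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Invoke the vSphere method; a Task moref is returned immediately.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Block until the task finishes, polling progress exactly as the
# "PowerOnVM_Task progress is ..." lines show; raises on task failure.
task_info = session.wait_for_task(task_ref)
print('PowerOnVM_Task finished with state: %s' % task_info.state)

The same invoke_api/wait_for_task pairing underlies every "Waiting for the task: (returnval){ ... } to complete." block in this log, with only the vSphere method name and arguments changing.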
[ 693.551071] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 693.552055] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7016e6e3-b69e-4dc0-bb17-51dc05543e1d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.636444] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52553c2a-591f-9401-274e-a03dc4c75e9a, 'name': SearchDatastore_Task, 'duration_secs': 0.016443} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.638042] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.638312] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.638551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.638748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.638913] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.641060] env[69927]: DEBUG nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Received event network-changed-6c93c813-14c8-4f76-918b-6efb59510588 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 
693.641520] env[69927]: DEBUG nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Refreshing instance network info cache due to event network-changed-6c93c813-14c8-4f76-918b-6efb59510588. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 693.641887] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Acquiring lock "refresh_cache-4bf59fae-8029-421b-95fd-a0d008891ce7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.642093] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Acquired lock "refresh_cache-4bf59fae-8029-421b-95fd-a0d008891ce7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.642403] env[69927]: DEBUG nova.network.neutron [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Refreshing network info cache for port 6c93c813-14c8-4f76-918b-6efb59510588 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.644189] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-beb27ade-1b5f-41c3-9abd-bdf765f3a40d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.665550] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.665550] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 693.665710] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a5fc4ee-53b2-4a3c-9a52-9dd1eaf2d33b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.673299] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 693.673299] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ab0c8e-4c97-7928-6179-07c85dab6373" [ 693.673299] env[69927]: _type = "Task" [ 693.673299] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.687332] env[69927]: DEBUG nova.compute.utils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 693.690915] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ab0c8e-4c97-7928-6179-07c85dab6373, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.690915] env[69927]: DEBUG nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 693.690915] env[69927]: DEBUG nova.network.neutron [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 693.705207] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095448, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.788192] env[69927]: DEBUG nova.policy [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee8722afef874a638d747ac90e142eec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b03211868624de1bf2dd49a481310e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 693.829032] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095447, 'name': PowerOnVM_Task, 'duration_secs': 0.53248} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.829032] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 693.829032] env[69927]: INFO nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Took 11.76 seconds to spawn the instance on the hypervisor. [ 693.829032] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 693.829032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b081b7d-bdb0-4a1f-a5eb-53d1676d68b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.961022] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Releasing lock "refresh_cache-8edafb98-331a-45b8-8de8-4ba04b035ffd" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.961022] env[69927]: DEBUG nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Instance network_info: |[{"id": "34f427e8-f150-431f-960f-584ea344457c", "address": "fa:16:3e:d5:38:60", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34f427e8-f1", "ovs_interfaceid": "34f427e8-f150-431f-960f-584ea344457c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 693.961404] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Instance VIF 
info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:38:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34f427e8-f150-431f-960f-584ea344457c', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.968315] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Creating folder: Project (dfeb8b05cf5c459587843b3bdf0e1f23). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.968934] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-404d2950-3c76-494b-9001-000233574664 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.983462] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Created folder: Project (dfeb8b05cf5c459587843b3bdf0e1f23) in parent group-v811283. [ 693.983727] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Creating folder: Instances. Parent ref: group-v811320. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.983935] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e1fa9e5-d297-45cf-90f9-59d6ac72c619 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.995179] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Created folder: Instances in parent group-v811320. [ 693.995510] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.995758] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.996013] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e27f9ef-142a-4888-9d82-b475b50ada54 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.019088] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.019088] env[69927]: value = "task-4095451" [ 694.019088] env[69927]: _type = "Task" [ 694.019088] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.030213] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095451, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.073233] env[69927]: INFO nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Took 21.73 seconds to build instance. [ 694.189019] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ab0c8e-4c97-7928-6179-07c85dab6373, 'name': SearchDatastore_Task, 'duration_secs': 0.07577} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.190735] env[69927]: DEBUG nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 694.198092] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9b534ba-c1ab-490c-839a-decc05bc4d39 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.213512] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 694.213512] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522b4e15-b2c3-bdcb-8298-9acf6464eadc" [ 694.213512] env[69927]: _type = "Task" [ 694.213512] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.218353] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.823617} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.221909] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.222156] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.223034] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6800201-75e1-4d57-aba6-7bc8393532be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.231851] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522b4e15-b2c3-bdcb-8298-9acf6464eadc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.233790] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 694.233790] env[69927]: value = "task-4095452" [ 694.233790] env[69927]: _type = "Task" [ 694.233790] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.248428] env[69927]: DEBUG nova.compute.manager [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Received event network-changed-50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.248656] env[69927]: DEBUG nova.compute.manager [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Refreshing instance network info cache due to event network-changed-50ea3bc5-fa5d-49db-99d9-e842cb85c0d8. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.250212] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] Acquiring lock "refresh_cache-ab8a8acc-cab7-4a82-bd90-b34147f17b0e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.250212] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] Acquired lock "refresh_cache-ab8a8acc-cab7-4a82-bd90-b34147f17b0e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.250212] env[69927]: DEBUG nova.network.neutron [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Refreshing network info cache for port 50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.259280] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095452, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.352632] env[69927]: INFO nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Took 22.08 seconds to build instance. [ 694.526585] env[69927]: DEBUG nova.network.neutron [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Successfully created port: 68ff56b2-b063-423a-93e6-b3aba5245461 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.541846] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095451, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.576845] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.253s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.680455] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b152a38-1aba-446e-9e1c-455fced10f0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.689696] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fe5e24-a999-4471-9af0-cdf34be22d97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.732236] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be79c2fb-b842-4a9c-a898-eda698977803 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.745941] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522b4e15-b2c3-bdcb-8298-9acf6464eadc, 'name': SearchDatastore_Task, 'duration_secs': 0.018464} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.749261] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.749525] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b1bcbcfb-2320-434c-901f-0f6a476a3069/b1bcbcfb-2320-434c-901f-0f6a476a3069.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 694.749987] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61bb3371-a484-46d1-a5da-da2c9d273dbb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.753458] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dbd144-6924-4fc1-842b-3b5a4ef83174 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.763747] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095452, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076995} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.765568] env[69927]: DEBUG nova.network.neutron [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Updated VIF entry in instance network info cache for port 6c93c813-14c8-4f76-918b-6efb59510588. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 694.765568] env[69927]: DEBUG nova.network.neutron [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Updating instance_info_cache with network_info: [{"id": "6c93c813-14c8-4f76-918b-6efb59510588", "address": "fa:16:3e:40:bc:e9", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c93c813-14", "ovs_interfaceid": "6c93c813-14c8-4f76-918b-6efb59510588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.768477] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.782039] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d68c932-9371-4681-85d0-9f3d1ac7af37 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.786749] env[69927]: DEBUG nova.compute.provider_tree [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.791596] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 694.791596] env[69927]: value = "task-4095453" [ 694.791596] env[69927]: _type = "Task" [ 694.791596] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.814558] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.816021] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e636f51e-2ef8-47d4-94c7-b23c3c5399b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.840437] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.847498] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 694.847498] env[69927]: value = "task-4095454" [ 694.847498] env[69927]: _type = "Task" [ 694.847498] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.856454] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.591s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.861350] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095454, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.026758] env[69927]: DEBUG nova.network.neutron [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Successfully updated port: 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 695.035164] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095451, 'name': CreateVM_Task, 'duration_secs': 0.638715} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.038024] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 695.038024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.038024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.038024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 695.038024] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5dab1cd-eac0-4835-a6e7-cd0445c1ad1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.045402] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 695.045402] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524f4ca1-5f87-37b9-a20c-c57b8e159bcd" [ 695.045402] env[69927]: _type = "Task" [ 695.045402] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.057374] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524f4ca1-5f87-37b9-a20c-c57b8e159bcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.082367] env[69927]: DEBUG nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 695.228780] env[69927]: DEBUG nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 695.277356] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 695.277356] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 695.277356] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 695.277586] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 695.277586] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 695.277766] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 695.278152] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 695.278479] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 695.278828] 
env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 695.279128] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 695.279410] env[69927]: DEBUG nova.virt.hardware [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 695.280535] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca63a0e-ffb4-4582-b89e-7da116c05d22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.285050] env[69927]: DEBUG nova.network.neutron [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Updated VIF entry in instance network info cache for port 50ea3bc5-fa5d-49db-99d9-e842cb85c0d8. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.285581] env[69927]: DEBUG nova.network.neutron [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Updating instance_info_cache with network_info: [{"id": "50ea3bc5-fa5d-49db-99d9-e842cb85c0d8", "address": "fa:16:3e:38:e6:82", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50ea3bc5-fa", "ovs_interfaceid": "50ea3bc5-fa5d-49db-99d9-e842cb85c0d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.287575] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Releasing lock "refresh_cache-4bf59fae-8029-421b-95fd-a0d008891ce7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.287959] 
env[69927]: DEBUG nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Received event network-vif-deleted-8ed32df7-4147-4bf4-bedd-196b87bbea4d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.290019] env[69927]: DEBUG nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Received event network-vif-plugged-5aeee040-2531-4d68-871a-2f65a93ad448 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.290019] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Acquiring lock "b1bcbcfb-2320-434c-901f-0f6a476a3069-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.290019] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.290019] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.290019] env[69927]: DEBUG nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] No waiting events found dispatching network-vif-plugged-5aeee040-2531-4d68-871a-2f65a93ad448 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 695.290333] env[69927]: WARNING nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Received unexpected event network-vif-plugged-5aeee040-2531-4d68-871a-2f65a93ad448 for instance with vm_state building and task_state spawning. [ 695.290333] env[69927]: DEBUG nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Received event network-changed-5aeee040-2531-4d68-871a-2f65a93ad448 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.290333] env[69927]: DEBUG nova.compute.manager [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Refreshing instance network info cache due to event network-changed-5aeee040-2531-4d68-871a-2f65a93ad448. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 695.290333] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Acquiring lock "refresh_cache-b1bcbcfb-2320-434c-901f-0f6a476a3069" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.290333] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Acquired lock "refresh_cache-b1bcbcfb-2320-434c-901f-0f6a476a3069" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.290613] env[69927]: DEBUG nova.network.neutron [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Refreshing network info cache for port 5aeee040-2531-4d68-871a-2f65a93ad448 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.292743] env[69927]: DEBUG nova.scheduler.client.report [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 695.309689] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72af818d-050a-46e5-91ee-9f541b9466c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.320465] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095453, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.359664] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.362492] env[69927]: DEBUG nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 695.530230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.530344] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquired lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.532769] env[69927]: DEBUG nova.network.neutron [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.558646] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524f4ca1-5f87-37b9-a20c-c57b8e159bcd, 'name': SearchDatastore_Task, 'duration_secs': 0.016423} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.558995] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.559265] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 695.559532] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.559684] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.559891] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.560512] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2f4242e-1d7a-427c-88ba-2ebd3be53fd4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.574676] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.574905] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 695.575819] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f440765a-d49e-4b31-bf56-3a3469e4879d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.583470] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 695.583470] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fe3fd8-6ac6-61bf-f823-fb5d11ffaf80" [ 695.583470] env[69927]: _type = "Task" [ 695.583470] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.595166] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fe3fd8-6ac6-61bf-f823-fb5d11ffaf80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.622155] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.793161] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] Releasing lock "refresh_cache-ab8a8acc-cab7-4a82-bd90-b34147f17b0e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.793575] env[69927]: DEBUG nova.compute.manager [req-a7b34e16-51a5-4bfa-a3e3-148b48fc19f5 req-57671b32-6865-43db-a8b8-225cbd86f9a9 service nova] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Received event network-vif-deleted-4df42787-eead-48f3-a537-5f3f2a36a836 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.799778] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.800568] env[69927]: DEBUG nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 695.808881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.420s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.809979] env[69927]: INFO nova.compute.claims [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.825930] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095453, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.730365} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.826338] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b1bcbcfb-2320-434c-901f-0f6a476a3069/b1bcbcfb-2320-434c-901f-0f6a476a3069.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 695.826735] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.828242] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f1c524f-c74a-4ea9-a6c7-3600f7096f4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.836523] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 695.836523] env[69927]: value = "task-4095455" [ 695.836523] env[69927]: _type = "Task" [ 695.836523] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.850483] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095455, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.865195] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095454, 'name': ReconfigVM_Task, 'duration_secs': 0.806978} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.865683] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Reconfigured VM instance instance-00000007 to attach disk [datastore1] c45d2259-2a05-49d5-81eb-4c79ced83121/c45d2259-2a05-49d5-81eb-4c79ced83121.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.868153] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abf7af22-492e-499f-9cb5-f96160a80927 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.880033] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 695.880033] env[69927]: value = "task-4095456" [ 695.880033] env[69927]: _type = "Task" [ 695.880033] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.893731] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095456, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.895349] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.099492] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fe3fd8-6ac6-61bf-f823-fb5d11ffaf80, 'name': SearchDatastore_Task, 'duration_secs': 0.017026} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.099492] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c06c3e2-84c2-4dd6-9997-fe71b40a7ec2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.107876] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 696.107876] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a68722-e1a5-2d6b-1fcb-7357ffa4eeb6" [ 696.107876] env[69927]: _type = "Task" [ 696.107876] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.115369] env[69927]: DEBUG nova.network.neutron [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Updated VIF entry in instance network info cache for port 5aeee040-2531-4d68-871a-2f65a93ad448. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 696.115842] env[69927]: DEBUG nova.network.neutron [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Updating instance_info_cache with network_info: [{"id": "5aeee040-2531-4d68-871a-2f65a93ad448", "address": "fa:16:3e:40:b6:6c", "network": {"id": "c9bde960-e0b0-435d-a4d7-da374ee28b52", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-166313151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66dfd282dce84de1b56e8271ff7b0318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aeee040-25", "ovs_interfaceid": "5aeee040-2531-4d68-871a-2f65a93ad448", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.119763] env[69927]: DEBUG nova.network.neutron [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.126386] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a68722-e1a5-2d6b-1fcb-7357ffa4eeb6, 'name': SearchDatastore_Task, 'duration_secs': 0.014322} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.127407] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.127751] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8edafb98-331a-45b8-8de8-4ba04b035ffd/8edafb98-331a-45b8-8de8-4ba04b035ffd.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 696.128100] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f122ce88-c5d9-482b-bf3b-7a026b2ab8ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.137663] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 696.137663] env[69927]: value = "task-4095457" [ 696.137663] env[69927]: _type = "Task" [ 696.137663] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.148235] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095457, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.309458] env[69927]: DEBUG nova.compute.utils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 696.313965] env[69927]: DEBUG nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 696.313965] env[69927]: DEBUG nova.network.neutron [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 696.350784] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095455, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103181} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.351151] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.352787] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975380d2-1d98-48f5-96c7-785d700f7621 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.382312] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] b1bcbcfb-2320-434c-901f-0f6a476a3069/b1bcbcfb-2320-434c-901f-0f6a476a3069.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 696.387895] env[69927]: DEBUG nova.policy [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9ce9220a25145e0bdd0d5868a96e53d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b414ce037734749949fe7951f9ffd1e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 696.389367] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37236510-729f-4ff8-bd28-70dbf3c46e2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.418865] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095456, 'name': Rename_Task, 'duration_secs': 0.242313} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.420495] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 696.421014] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 696.421014] env[69927]: value = "task-4095458" [ 696.421014] env[69927]: _type = "Task" [ 696.421014] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.421192] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70ac6851-7594-497e-a50f-49fb3d430344 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.432889] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095458, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.434675] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 696.434675] env[69927]: value = "task-4095459" [ 696.434675] env[69927]: _type = "Task" [ 696.434675] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.444718] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095459, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.515863] env[69927]: DEBUG nova.network.neutron [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updating instance_info_cache with network_info: [{"id": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "address": "fa:16:3e:46:5d:de", "network": {"id": "d34a6e93-05fd-40da-b452-332d47c34037", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1653403369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179addc4042d4c65b15c008132e74bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3d0fe6-11", "ovs_interfaceid": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.583121] env[69927]: DEBUG nova.network.neutron [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Successfully updated port: 68ff56b2-b063-423a-93e6-b3aba5245461 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 696.621557] env[69927]: DEBUG oslo_concurrency.lockutils [req-c97fc932-82ab-480e-a401-3341b72eef2c req-655e6f64-f1b2-4a7e-bbd6-aa5ca3639e16 service nova] Releasing lock "refresh_cache-b1bcbcfb-2320-434c-901f-0f6a476a3069" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.651773] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095457, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.818844] env[69927]: DEBUG nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 696.941958] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095458, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.942932] env[69927]: DEBUG nova.network.neutron [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Successfully created port: 9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.959852] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095459, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.021395] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Releasing lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.021836] env[69927]: DEBUG nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Instance network_info: |[{"id": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "address": "fa:16:3e:46:5d:de", "network": {"id": "d34a6e93-05fd-40da-b452-332d47c34037", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1653403369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179addc4042d4c65b15c008132e74bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3d0fe6-11", "ovs_interfaceid": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 697.023777] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:5d:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49b5df12-d801-4140-8816-2fd401608c7d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e3d0fe6-1171-4bdd-bfab-86bb4f7af637', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
697.032311] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Creating folder: Project (179addc4042d4c65b15c008132e74bf6). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.032740] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0a759e8-7c2a-4ba2-9039-99832b415475 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.047216] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Created folder: Project (179addc4042d4c65b15c008132e74bf6) in parent group-v811283. [ 697.047216] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Creating folder: Instances. Parent ref: group-v811323. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.047216] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe023879-c2c1-4f03-86e8-cfc22c81293e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.061947] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Created folder: Instances in parent group-v811323. [ 697.062291] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.062921] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 697.062921] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ee1789c-81eb-480b-a03b-c68deabfc6b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.085909] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Received event network-vif-plugged-34f427e8-f150-431f-960f-584ea344457c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.086175] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Acquiring lock "8edafb98-331a-45b8-8de8-4ba04b035ffd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.086432] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.086837] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.086837] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] No waiting events found dispatching network-vif-plugged-34f427e8-f150-431f-960f-584ea344457c {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 697.086930] env[69927]: WARNING nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Received unexpected event network-vif-plugged-34f427e8-f150-431f-960f-584ea344457c for instance with vm_state building and task_state spawning. [ 697.087054] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Received event network-changed-34f427e8-f150-431f-960f-584ea344457c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.087260] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Refreshing instance network info cache due to event network-changed-34f427e8-f150-431f-960f-584ea344457c. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 697.087991] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Acquiring lock "refresh_cache-8edafb98-331a-45b8-8de8-4ba04b035ffd" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.088451] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Acquired lock "refresh_cache-8edafb98-331a-45b8-8de8-4ba04b035ffd" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.088769] env[69927]: DEBUG nova.network.neutron [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Refreshing network info cache for port 34f427e8-f150-431f-960f-584ea344457c {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.096023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.096023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquired lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.096023] env[69927]: DEBUG nova.network.neutron [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.100486] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 697.100486] env[69927]: value = "task-4095462" [ 697.100486] env[69927]: _type = "Task" [ 697.100486] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.110901] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095462, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.156114] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095457, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.852383} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.158304] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8edafb98-331a-45b8-8de8-4ba04b035ffd/8edafb98-331a-45b8-8de8-4ba04b035ffd.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.158631] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.160027] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dcdcd2a2-632c-40ce-b9f4-890b8286a092 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.168801] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 697.168801] env[69927]: value = "task-4095463" [ 697.168801] env[69927]: _type = "Task" [ 697.168801] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.185501] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095463, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.295967] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.296121] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.393932] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51991130-beb2-443f-b247-b8497bab9c8c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.404262] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474cd368-b44a-4bc1-bbf7-823cc2d8e207 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.454328] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5124a01a-6a7b-4bee-b47e-73c68628a4a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.467314] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095459, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.467649] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095458, 'name': ReconfigVM_Task, 'duration_secs': 0.616488} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.469531] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Reconfigured VM instance instance-0000000b to attach disk [datastore1] b1bcbcfb-2320-434c-901f-0f6a476a3069/b1bcbcfb-2320-434c-901f-0f6a476a3069.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 697.470201] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4b8feef-1370-4d3e-9843-dccdaaa5eca2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.473585] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14077fea-a59e-415e-a243-f649a76623d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.492346] env[69927]: DEBUG nova.compute.provider_tree [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.495475] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 697.495475] env[69927]: value = "task-4095464" [ 697.495475] env[69927]: _type = "Task" [ 697.495475] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.504833] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095464, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.612854] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095462, 'name': CreateVM_Task, 'duration_secs': 0.473587} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.613153] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 697.614131] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.614459] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.614946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 697.615299] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa78fa36-04d9-4863-9e04-c42f54d623cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.621097] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 697.621097] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5235830b-2652-00bc-e300-f17cdc7607fe" [ 697.621097] env[69927]: _type = "Task" [ 697.621097] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.630669] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5235830b-2652-00bc-e300-f17cdc7607fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.679015] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095463, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140415} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.679597] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 697.680217] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c85303-ce0a-4925-ab6b-b5e7132f5b21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.703706] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 8edafb98-331a-45b8-8de8-4ba04b035ffd/8edafb98-331a-45b8-8de8-4ba04b035ffd.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.704842] env[69927]: DEBUG nova.network.neutron [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.707023] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1464127d-a482-4c5a-bd9f-90548adccaed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.728881] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 697.728881] env[69927]: value = "task-4095465" [ 697.728881] env[69927]: _type = "Task" [ 697.728881] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.739609] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095465, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.836532] env[69927]: DEBUG nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 697.868120] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 697.868398] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.868537] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 697.868715] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.868861] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 697.869015] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 697.869235] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 697.869391] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 697.869559] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 697.870158] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 697.870158] env[69927]: DEBUG nova.virt.hardware [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 697.870823] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad133801-115d-49db-9f2f-a3de5135b035 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.882030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1943666d-837e-4ff2-83f4-7733c1d21898 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.942257] env[69927]: DEBUG nova.compute.manager [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Received event network-changed-b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.942931] env[69927]: DEBUG nova.compute.manager [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Refreshing instance network info cache due to event network-changed-b112a351-376b-4433-94a9-e8e186f3dff3. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 697.942931] env[69927]: DEBUG oslo_concurrency.lockutils [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] Acquiring lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.943037] env[69927]: DEBUG oslo_concurrency.lockutils [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] Acquired lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.944096] env[69927]: DEBUG nova.network.neutron [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Refreshing network info cache for port b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.961911] env[69927]: DEBUG oslo_vmware.api [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095459, 'name': PowerOnVM_Task, 'duration_secs': 1.052029} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.962975] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.963251] env[69927]: DEBUG nova.compute.manager [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 697.964266] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf404eb-13a3-4a69-80af-8b0115206f9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.998647] env[69927]: DEBUG nova.scheduler.client.report [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.018131] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095464, 'name': Rename_Task, 'duration_secs': 0.441417} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.018437] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.018688] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79ec7726-112b-4880-b75f-353a3f947879 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.026880] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 698.026880] env[69927]: value = "task-4095466" [ 698.026880] env[69927]: _type = "Task" [ 698.026880] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.037184] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095466, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.063031] env[69927]: DEBUG nova.network.neutron [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Updating instance_info_cache with network_info: [{"id": "68ff56b2-b063-423a-93e6-b3aba5245461", "address": "fa:16:3e:a1:0e:04", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ff56b2-b0", "ovs_interfaceid": "68ff56b2-b063-423a-93e6-b3aba5245461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.133286] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5235830b-2652-00bc-e300-f17cdc7607fe, 'name': SearchDatastore_Task, 'duration_secs': 0.029843} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.133659] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.133659] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 698.134583] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.134875] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.135156] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 698.135669] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-560c2ca7-1685-4eeb-92b1-8553060fed30 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.151125] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 698.151125] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 698.151125] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d123da6b-f731-402d-b024-db598c629842 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.162438] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 698.162438] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f8dfa9-1b3f-5127-58ed-092cbd114071" [ 698.162438] env[69927]: _type = "Task" [ 698.162438] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.176856] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f8dfa9-1b3f-5127-58ed-092cbd114071, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.196387] env[69927]: DEBUG nova.network.neutron [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Updated VIF entry in instance network info cache for port 34f427e8-f150-431f-960f-584ea344457c. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 698.196730] env[69927]: DEBUG nova.network.neutron [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Updating instance_info_cache with network_info: [{"id": "34f427e8-f150-431f-960f-584ea344457c", "address": "fa:16:3e:d5:38:60", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34f427e8-f1", "ovs_interfaceid": "34f427e8-f150-431f-960f-584ea344457c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.244844] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095465, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.497529] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.511686] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.703s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.511686] env[69927]: DEBUG nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 698.515274] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.310s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.515581] env[69927]: DEBUG nova.objects.instance [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lazy-loading 'resources' on Instance uuid 820c50b9-3c18-41bc-a000-22425b1dbb27 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 698.542242] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095466, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.565426] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Releasing lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.565426] env[69927]: DEBUG nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Instance network_info: |[{"id": "68ff56b2-b063-423a-93e6-b3aba5245461", "address": "fa:16:3e:a1:0e:04", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ff56b2-b0", "ovs_interfaceid": "68ff56b2-b063-423a-93e6-b3aba5245461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 698.565807] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:0e:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68ff56b2-b063-423a-93e6-b3aba5245461', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.577949] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Creating folder: Project (6b03211868624de1bf2dd49a481310e6). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.578907] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b052d11-e716-4f14-b008-b9cdb874c5a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.591167] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Created folder: Project (6b03211868624de1bf2dd49a481310e6) in parent group-v811283. [ 698.591373] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Creating folder: Instances. Parent ref: group-v811326. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.591628] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca458afc-aa8d-471d-ae5f-500ff96e7b25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.612249] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Created folder: Instances in parent group-v811326. [ 698.612249] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 698.612641] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 698.612861] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09c9acaf-6be8-4587-9071-8257c49492ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.639581] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.639581] env[69927]: value = "task-4095469" [ 698.639581] env[69927]: _type = "Task" [ 698.639581] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.650266] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095469, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.682286] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f8dfa9-1b3f-5127-58ed-092cbd114071, 'name': SearchDatastore_Task, 'duration_secs': 0.021025} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.682286] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf495a05-dd16-493c-8105-8ff4e0390c06 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.698364] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 698.698364] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a79e8c-ced9-8432-cd41-818804c5c596" [ 698.698364] env[69927]: _type = "Task" [ 698.698364] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.707148] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Releasing lock "refresh_cache-8edafb98-331a-45b8-8de8-4ba04b035ffd" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.707148] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Received event network-vif-plugged-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 698.707148] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Acquiring lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.707148] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.707148] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.708294] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] No waiting events found dispatching network-vif-plugged-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 698.708294] env[69927]: WARNING nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Received unexpected event network-vif-plugged-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 for instance with vm_state building and task_state 
spawning. [ 698.708294] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Received event network-changed-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 698.708294] env[69927]: DEBUG nova.compute.manager [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Refreshing instance network info cache due to event network-changed-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 698.708294] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Acquiring lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.709973] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Acquired lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.709973] env[69927]: DEBUG nova.network.neutron [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Refreshing network info cache for port 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 698.719107] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a79e8c-ced9-8432-cd41-818804c5c596, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.745484] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095465, 'name': ReconfigVM_Task, 'duration_secs': 0.715817} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.745484] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 8edafb98-331a-45b8-8de8-4ba04b035ffd/8edafb98-331a-45b8-8de8-4ba04b035ffd.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.745484] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb2be47c-826c-4a35-89c7-25eec5ea5efd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.753228] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 698.753228] env[69927]: value = "task-4095470" [ 698.753228] env[69927]: _type = "Task" [ 698.753228] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.768459] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095470, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.025511] env[69927]: DEBUG nova.compute.utils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 699.028029] env[69927]: DEBUG nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 699.028029] env[69927]: DEBUG nova.network.neutron [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.042161] env[69927]: DEBUG oslo_vmware.api [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095466, 'name': PowerOnVM_Task, 'duration_secs': 0.677545} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.042161] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.042161] env[69927]: INFO nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Took 11.93 seconds to spawn the instance on the hypervisor. [ 699.042752] env[69927]: DEBUG nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 699.043703] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53e1612-b36c-4642-9bf4-50a3edc0deba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.152986] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095469, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.216160] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a79e8c-ced9-8432-cd41-818804c5c596, 'name': SearchDatastore_Task, 'duration_secs': 0.032332} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.216160] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.216160] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ee422a46-c6e4-4098-8f74-b9f0779d0fba/ee422a46-c6e4-4098-8f74-b9f0779d0fba.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 699.219277] env[69927]: DEBUG nova.network.neutron [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Updated VIF entry in instance network info cache for port b112a351-376b-4433-94a9-e8e186f3dff3. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 699.219644] env[69927]: DEBUG nova.network.neutron [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Updating instance_info_cache with network_info: [{"id": "b112a351-376b-4433-94a9-e8e186f3dff3", "address": "fa:16:3e:b5:a4:e8", "network": {"id": "0344fc7a-5b52-4c14-9a09-fd627f1d3d43", "bridge": "br-int", "label": "tempest-ServersTestJSON-28061948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc73b36a965a4503b546cc90a2950441", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb112a351-37", "ovs_interfaceid": "b112a351-376b-4433-94a9-e8e186f3dff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.226023] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8a814f6-c8d9-440e-9f84-230406db6d0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.231057] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 699.231057] env[69927]: value = "task-4095471" [ 699.231057] env[69927]: _type = "Task" [ 699.231057] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.243570] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095471, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.265607] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095470, 'name': Rename_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.392113] env[69927]: DEBUG nova.policy [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20629f26389d40199a4c5d5d2312dbae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2fb1fc4c3ae41a5b331c6be7973eb72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 699.497791] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720292a1-ade3-45c3-9af8-7e9b126e17dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.512476] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001a6425-5147-4a2b-b235-0e4268d6d6e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.549594] env[69927]: DEBUG nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 699.555066] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1132fbea-18e4-4e30-af2e-57abd7d2388f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.562881] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55c1b3d-3e36-4ae3-817d-cdce5b569fdd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.585444] env[69927]: DEBUG nova.compute.provider_tree [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.592027] env[69927]: INFO nova.compute.manager [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Took 27.19 seconds to build instance. [ 699.654409] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095469, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.726679] env[69927]: DEBUG oslo_concurrency.lockutils [req-be7af286-2fb2-498d-b769-b5f456312293 req-8750cbbe-3c61-4bf5-8a04-476b54ece33f service nova] Releasing lock "refresh_cache-7ce79e41-333a-4ef3-ba68-f74067d4ac5a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.747960] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095471, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.764993] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095470, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.767158] env[69927]: DEBUG nova.network.neutron [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Successfully updated port: 9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 700.079151] env[69927]: DEBUG nova.network.neutron [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updated VIF entry in instance network info cache for port 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 700.080135] env[69927]: DEBUG nova.network.neutron [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updating instance_info_cache with network_info: [{"id": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "address": "fa:16:3e:46:5d:de", "network": {"id": "d34a6e93-05fd-40da-b452-332d47c34037", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1653403369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179addc4042d4c65b15c008132e74bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3d0fe6-11", "ovs_interfaceid": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.090347] env[69927]: DEBUG nova.scheduler.client.report [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.097025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-288ff329-245e-4962-a8fe-09a35a53bec8 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.707s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.159213] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095469, 'name': CreateVM_Task, 'duration_secs': 1.513465} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.159213] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 700.159213] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.159213] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.159213] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 700.160265] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8eff7230-5b10-4c13-acb2-67e8b594459a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.166332] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 700.166332] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f2e6f2-b49d-f65a-82aa-6f092c56704c" [ 700.166332] env[69927]: _type = "Task" [ 700.166332] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.180983] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f2e6f2-b49d-f65a-82aa-6f092c56704c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.248953] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095471, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.268143] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095470, 'name': Rename_Task, 'duration_secs': 1.268053} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.268143] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 700.268495] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d3df106-a80b-45f4-8861-064d9a9ec9db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.272663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.272663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquired lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.272663] env[69927]: DEBUG nova.network.neutron [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 700.284465] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 700.284465] env[69927]: value = "task-4095472" [ 700.284465] env[69927]: _type = "Task" [ 700.284465] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.298970] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095472, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.352873] env[69927]: DEBUG nova.network.neutron [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Successfully created port: e4473f09-99e5-4ac3-b9cc-3316e06abd65 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.564677] env[69927]: DEBUG nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 700.584017] env[69927]: DEBUG oslo_concurrency.lockutils [req-c057a0f3-8839-41c3-98f4-975e5b6c7c93 req-e3e2f7e8-ce4b-43e4-90f2-6cd5b2631363 service nova] Releasing lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.601018] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 700.601155] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.601236] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 700.601465] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.601643] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 700.601754] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 700.603108] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 700.603188] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 
tempest-ImagesTestJSON-210893687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 700.603550] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 700.603550] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 700.603720] env[69927]: DEBUG nova.virt.hardware [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 700.604666] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.608110] env[69927]: DEBUG nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 700.613207] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df0d8d9-e0b2-4f1d-b980-e41b15ec11b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.618465] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.006s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.620126] env[69927]: INFO nova.compute.claims [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.628700] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5e7a1-0a94-682b-4e3d-6232c254270e/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 700.631722] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20793303-e0d0-4da5-97e2-177cde1f0600 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.641756] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69ce872-4057-4cce-9157-f95f16e1b486 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.653603] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5e7a1-0a94-682b-4e3d-6232c254270e/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 700.653603] env[69927]: ERROR oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5e7a1-0a94-682b-4e3d-6232c254270e/disk-0.vmdk due to incomplete transfer. [ 700.654962] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-40c2e8c3-6321-479a-8a90-11f6a1470638 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.671020] env[69927]: INFO nova.scheduler.client.report [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Deleted allocations for instance 820c50b9-3c18-41bc-a000-22425b1dbb27 [ 700.671020] env[69927]: DEBUG oslo_vmware.rw_handles [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5e7a1-0a94-682b-4e3d-6232c254270e/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 700.671020] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Uploaded image 35caae73-182c-41ae-bee8-8a7014804e5e to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 700.671020] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 700.674016] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a2f0d91f-66ad-4e3a-bde0-e1b8333266bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.690982] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f2e6f2-b49d-f65a-82aa-6f092c56704c, 'name': SearchDatastore_Task, 'duration_secs': 0.015261} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.692628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.692888] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.693210] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.693708] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.693708] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.693825] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 700.693825] env[69927]: value = "task-4095473" [ 700.693825] env[69927]: _type = "Task" [ 700.693825] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.694046] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efce17d0-36e7-49be-a618-9332e0279765 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.706694] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095473, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.720370] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.720639] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 700.722861] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9229bba5-41bc-4e93-9282-7143b58116e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.730838] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 700.730838] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525637ff-a953-e9e1-a510-4880c382a3b3" [ 700.730838] env[69927]: _type = "Task" [ 700.730838] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.753652] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525637ff-a953-e9e1-a510-4880c382a3b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.761633] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095471, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.371949} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.762526] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ee422a46-c6e4-4098-8f74-b9f0779d0fba/ee422a46-c6e4-4098-8f74-b9f0779d0fba.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 700.762935] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 700.763749] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5bcc17d-60ef-472e-b5aa-2c1d8490551a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.773293] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 700.773293] env[69927]: value = "task-4095474" [ 700.773293] env[69927]: _type = "Task" [ 700.773293] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.794892] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.802409] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095472, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.876237] env[69927]: DEBUG nova.network.neutron [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.148258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.186847] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b28916e0-198e-4d85-91b9-0b08e633cee8 tempest-ServerDiagnosticsNegativeTest-1566978478 tempest-ServerDiagnosticsNegativeTest-1566978478-project-member] Lock "820c50b9-3c18-41bc-a000-22425b1dbb27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.726s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.191361] env[69927]: DEBUG nova.network.neutron [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Updating instance_info_cache with network_info: [{"id": "9a088536-28fd-4645-87ba-e071c754db1e", "address": "fa:16:3e:ad:f4:34", "network": {"id": "187adf43-df44-41b7-8dc1-b541b4f84fee", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1475247123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b414ce037734749949fe7951f9ffd1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a088536-28", "ovs_interfaceid": "9a088536-28fd-4645-87ba-e071c754db1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.208573] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095473, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.249351] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525637ff-a953-e9e1-a510-4880c382a3b3, 'name': SearchDatastore_Task, 'duration_secs': 0.020075} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.250215] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bc4617e-5cb3-4fe1-9513-d0c253ec0ec7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.257261] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 701.257261] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ac15ad-e17b-6e65-4f42-76bf8d6aef3c" [ 701.257261] env[69927]: _type = "Task" [ 701.257261] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.267776] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ac15ad-e17b-6e65-4f42-76bf8d6aef3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.291813] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080654} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.293143] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 701.294316] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cb39de-4be9-4543-b85e-e32fc0095ea6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.303488] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095472, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.327027] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] ee422a46-c6e4-4098-8f74-b9f0779d0fba/ee422a46-c6e4-4098-8f74-b9f0779d0fba.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 701.327027] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66f35af6-baf4-466f-ad99-0991036aca79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.348071] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 701.348071] env[69927]: value = "task-4095475" [ 701.348071] env[69927]: _type = "Task" [ 701.348071] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.358425] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095475, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.366828] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Received event network-vif-plugged-68ff56b2-b063-423a-93e6-b3aba5245461 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.370358] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Acquiring lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.371114] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.371114] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.371614] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 
6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] No waiting events found dispatching network-vif-plugged-68ff56b2-b063-423a-93e6-b3aba5245461 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 701.371614] env[69927]: WARNING nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Received unexpected event network-vif-plugged-68ff56b2-b063-423a-93e6-b3aba5245461 for instance with vm_state building and task_state spawning. [ 701.371614] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Received event network-changed-68ff56b2-b063-423a-93e6-b3aba5245461 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.372544] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Refreshing instance network info cache due to event network-changed-68ff56b2-b063-423a-93e6-b3aba5245461. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 701.372544] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Acquiring lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.372544] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Acquired lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.372544] env[69927]: DEBUG nova.network.neutron [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Refreshing network info cache for port 68ff56b2-b063-423a-93e6-b3aba5245461 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.696339] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Releasing lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.697469] env[69927]: DEBUG nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Instance network_info: |[{"id": "9a088536-28fd-4645-87ba-e071c754db1e", "address": "fa:16:3e:ad:f4:34", "network": {"id": "187adf43-df44-41b7-8dc1-b541b4f84fee", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1475247123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "1b414ce037734749949fe7951f9ffd1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a088536-28", "ovs_interfaceid": "9a088536-28fd-4645-87ba-e071c754db1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 701.700020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:f4:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd8383707-f093-40a7-a5ba-31b0e07cac45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a088536-28fd-4645-87ba-e071c754db1e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 701.711786] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Creating folder: Project (1b414ce037734749949fe7951f9ffd1e). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.718928] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2929d591-e950-4748-be05-8560add6f396 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.727894] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095473, 'name': Destroy_Task, 'duration_secs': 0.740475} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.728128] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Destroyed the VM [ 701.728787] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 701.730620] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-54397716-3470-4747-89f4-fbc40a6a1598 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.734518] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Created folder: Project (1b414ce037734749949fe7951f9ffd1e) in parent group-v811283. [ 701.734518] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Creating folder: Instances. Parent ref: group-v811329. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.737021] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-217ef546-4ed1-407d-9fa4-e1e3222adbb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.745622] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 701.745622] env[69927]: value = "task-4095477" [ 701.745622] env[69927]: _type = "Task" [ 701.745622] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.753318] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Created folder: Instances in parent group-v811329. [ 701.755125] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.758240] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 701.759063] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095477, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.764232] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dc9fc2f-dfe4-418a-bb9d-72afc77f678e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.792119] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 701.792119] env[69927]: value = "task-4095479" [ 701.792119] env[69927]: _type = "Task" [ 701.792119] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.800096] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ac15ad-e17b-6e65-4f42-76bf8d6aef3c, 'name': SearchDatastore_Task, 'duration_secs': 0.021783} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.803658] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.803658] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85/6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 701.808554] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50e2968b-6291-4fb9-bbb7-6d2dd3cd00a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.811009] env[69927]: DEBUG oslo_vmware.api [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095472, 'name': PowerOnVM_Task, 'duration_secs': 1.248606} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.815198] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 701.815411] env[69927]: INFO nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Took 12.17 seconds to spawn the instance on the hypervisor. [ 701.815600] env[69927]: DEBUG nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 701.815884] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095479, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.821221] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1ef3b4-3e2e-4e38-a782-4270e02521b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.825449] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 701.825449] env[69927]: value = "task-4095480" [ 701.825449] env[69927]: _type = "Task" [ 701.825449] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.841099] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095480, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.860267] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095475, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.980617] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "4bf59fae-8029-421b-95fd-a0d008891ce7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.981076] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.981525] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "4bf59fae-8029-421b-95fd-a0d008891ce7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.981525] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.981762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.984703] env[69927]: INFO nova.compute.manager [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Terminating instance [ 702.185336] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc79c50-aaef-4972-adfa-8c1ed1f783df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.198526] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84709070-c802-495b-9f0a-144d7ddc3275 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.258381] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13efb81-ade4-4120-9054-f21835991418 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.267788] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095477, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.271936] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5df483-2171-4ea0-aa01-c83796cfce8d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.291578] env[69927]: DEBUG nova.compute.provider_tree [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.312089] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095479, 'name': CreateVM_Task, 'duration_secs': 0.401342} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.312089] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 702.312089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.312089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.312089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 702.312539] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec474806-d92e-40f2-8199-a921a3e885b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.320038] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 702.320038] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aa133b-06d3-459d-0903-72e5bd3dda27" [ 702.320038] env[69927]: _type = "Task" [ 702.320038] env[69927]: } to 
complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.334767] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aa133b-06d3-459d-0903-72e5bd3dda27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.346632] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095480, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.353369] env[69927]: INFO nova.compute.manager [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Took 28.74 seconds to build instance. [ 702.364834] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095475, 'name': ReconfigVM_Task, 'duration_secs': 0.650214} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.365197] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Reconfigured VM instance instance-0000000d to attach disk [datastore1] ee422a46-c6e4-4098-8f74-b9f0779d0fba/ee422a46-c6e4-4098-8f74-b9f0779d0fba.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 702.365937] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4608062c-93fd-4738-9967-0fc490a2c90c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.379263] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 702.379263] env[69927]: value = "task-4095481" [ 702.379263] env[69927]: _type = "Task" [ 702.379263] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.396517] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095481, 'name': Rename_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.493849] env[69927]: DEBUG nova.compute.manager [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 702.493849] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.494704] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d9100c-2e7f-444e-a4c4-04943304cddb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.506898] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.507189] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c207594-4132-4012-810a-352eee38bc7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.516750] env[69927]: DEBUG oslo_vmware.api [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 702.516750] env[69927]: value = "task-4095482" [ 702.516750] env[69927]: _type = "Task" [ 702.516750] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.529874] env[69927]: DEBUG oslo_vmware.api [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.764267] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095477, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.799366] env[69927]: DEBUG nova.scheduler.client.report [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.848646] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aa133b-06d3-459d-0903-72e5bd3dda27, 'name': SearchDatastore_Task, 'duration_secs': 0.027696} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.848908] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095480, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722849} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.849196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.849436] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 702.849774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.849938] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.850137] env[69927]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 702.851086] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85/6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.851086] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 702.851086] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a340cf8f-e31c-4eb6-a41a-97ff8d9d0228 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.853790] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6fe3570-a7b7-4625-aa5f-da2feb7a2f86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.858148] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1148cd85-0485-454f-a3f3-aa152d757056 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.835s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.867941] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.868151] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 702.868946] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acc59fa2-648b-4971-aeef-b1ca3a324517 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.875089] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 702.875089] env[69927]: value = "task-4095483" [ 702.875089] env[69927]: _type = "Task" [ 702.875089] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.886572] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 702.886572] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5273b4f7-f2f3-9fcc-22ed-afe16bdd8a23" [ 702.886572] env[69927]: _type = "Task" [ 702.886572] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.891304] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095483, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.902035] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095481, 'name': Rename_Task, 'duration_secs': 0.378636} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.902832] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 702.903236] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c05ddd5-015b-4e76-8b97-fa199babf116 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.912406] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5273b4f7-f2f3-9fcc-22ed-afe16bdd8a23, 'name': SearchDatastore_Task, 'duration_secs': 0.01933} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.915404] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 702.915404] env[69927]: value = "task-4095484" [ 702.915404] env[69927]: _type = "Task" [ 702.915404] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.915833] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d9043ec-7a34-4df8-90e0-cf25063b8a5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.933227] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095484, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.933227] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 702.933227] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52356662-885f-3021-c14f-b9852e90dc04" [ 702.933227] env[69927]: _type = "Task" [ 702.933227] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.944656] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52356662-885f-3021-c14f-b9852e90dc04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.966975] env[69927]: DEBUG nova.network.neutron [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Updated VIF entry in instance network info cache for port 68ff56b2-b063-423a-93e6-b3aba5245461. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 702.967362] env[69927]: DEBUG nova.network.neutron [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Updating instance_info_cache with network_info: [{"id": "68ff56b2-b063-423a-93e6-b3aba5245461", "address": "fa:16:3e:a1:0e:04", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ff56b2-b0", "ovs_interfaceid": "68ff56b2-b063-423a-93e6-b3aba5245461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.994154] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "c45d2259-2a05-49d5-81eb-4c79ced83121" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.994416] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "c45d2259-2a05-49d5-81eb-4c79ced83121" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.994926] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "c45d2259-2a05-49d5-81eb-4c79ced83121-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.994926] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "c45d2259-2a05-49d5-81eb-4c79ced83121-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.995117] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 
tempest-ServerShowV254Test-369491814-project-member] Lock "c45d2259-2a05-49d5-81eb-4c79ced83121-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.997401] env[69927]: INFO nova.compute.manager [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Terminating instance [ 703.029423] env[69927]: DEBUG oslo_vmware.api [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095482, 'name': PowerOffVM_Task, 'duration_secs': 0.275101} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.029423] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 703.029519] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 703.030099] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cdd151b-0973-4046-81e5-36d52d8d6ff2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.119696] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 703.119918] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 703.120107] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleting the datastore file [datastore1] 4bf59fae-8029-421b-95fd-a0d008891ce7 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 703.120379] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5b026b3-75a9-4092-93b2-544f387faad9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.130182] env[69927]: DEBUG oslo_vmware.api [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 
tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 703.130182] env[69927]: value = "task-4095486" [ 703.130182] env[69927]: _type = "Task" [ 703.130182] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.140569] env[69927]: DEBUG oslo_vmware.api [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.265064] env[69927]: DEBUG oslo_vmware.api [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095477, 'name': RemoveSnapshot_Task, 'duration_secs': 1.267347} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.265358] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 703.266250] env[69927]: INFO nova.compute.manager [None req-351fd9f9-e6a0-4a5e-8b64-b8200ea90c5d tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Took 17.79 seconds to snapshot the instance on the hypervisor. [ 703.288072] env[69927]: DEBUG nova.network.neutron [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Successfully updated port: e4473f09-99e5-4ac3-b9cc-3316e06abd65 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.306017] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.687s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.306017] env[69927]: DEBUG nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 703.309667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.640s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.311802] env[69927]: INFO nova.compute.claims [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.354746] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "a4249857-6f60-4040-b676-d2d19dc83f15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.354746] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "a4249857-6f60-4040-b676-d2d19dc83f15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.366678] env[69927]: DEBUG nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 703.389549] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095483, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104458} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.389888] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 703.390784] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724e82d6-4edb-474e-a680-62973316a99d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.426035] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85/6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 703.426035] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d960fff3-a139-40d8-8c82-ac6162edb9dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.455214] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095484, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.460026] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 703.460026] env[69927]: value = "task-4095487" [ 703.460026] env[69927]: _type = "Task" [ 703.460026] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.460895] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52356662-885f-3021-c14f-b9852e90dc04, 'name': SearchDatastore_Task, 'duration_secs': 0.017968} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.460895] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.460895] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8442f144-2be4-4634-b151-62f049a975b6/8442f144-2be4-4634-b151-62f049a975b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 703.464027] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-715445ac-b758-4b02-bab5-4bd493655475 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.474098] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Releasing lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.474394] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Received event network-vif-plugged-9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.475604] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Acquiring lock "8442f144-2be4-4634-b151-62f049a975b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.475604] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Lock "8442f144-2be4-4634-b151-62f049a975b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.475604] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Lock "8442f144-2be4-4634-b151-62f049a975b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.475604] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] No waiting events found dispatching 
network-vif-plugged-9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 703.475604] env[69927]: WARNING nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Received unexpected event network-vif-plugged-9a088536-28fd-4645-87ba-e071c754db1e for instance with vm_state building and task_state spawning. [ 703.475839] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Received event network-changed-9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.475839] env[69927]: DEBUG nova.compute.manager [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Refreshing instance network info cache due to event network-changed-9a088536-28fd-4645-87ba-e071c754db1e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 703.475839] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Acquiring lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.475984] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Acquired lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.476323] env[69927]: DEBUG nova.network.neutron [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Refreshing network info cache for port 9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.477314] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095487, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.479094] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 703.479094] env[69927]: value = "task-4095488" [ 703.479094] env[69927]: _type = "Task" [ 703.479094] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.488514] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095488, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.501601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "refresh_cache-c45d2259-2a05-49d5-81eb-4c79ced83121" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.502043] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquired lock "refresh_cache-c45d2259-2a05-49d5-81eb-4c79ced83121" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.502043] env[69927]: DEBUG nova.network.neutron [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.646797] env[69927]: DEBUG oslo_vmware.api [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.419344} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.647117] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.647726] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.647985] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.648335] env[69927]: INFO nova.compute.manager [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 703.648599] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 703.648996] env[69927]: DEBUG nova.compute.manager [-] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 703.649206] env[69927]: DEBUG nova.network.neutron [-] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 703.794647] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "refresh_cache-128d0705-21a0-4103-ae84-85bbac7e718b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.794814] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "refresh_cache-128d0705-21a0-4103-ae84-85bbac7e718b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.794970] env[69927]: DEBUG nova.network.neutron [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.823582] env[69927]: DEBUG nova.compute.utils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 703.825209] env[69927]: DEBUG nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 703.825209] env[69927]: DEBUG nova.network.neutron [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 703.904663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.937034] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095484, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.976061] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095487, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.976938] env[69927]: DEBUG nova.policy [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed22a7ac85664bd8b86c8a30c8d51910', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd28bd5c5feee4a39b76694d57eb3aaf0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 703.993710] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095488, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.042973] env[69927]: DEBUG nova.network.neutron [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.248551] env[69927]: DEBUG nova.network.neutron [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.325854] env[69927]: DEBUG nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 704.394155] env[69927]: DEBUG nova.network.neutron [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.441061] env[69927]: DEBUG oslo_vmware.api [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095484, 'name': PowerOnVM_Task, 'duration_secs': 1.165193} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.443265] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 704.443391] env[69927]: INFO nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Took 12.04 seconds to spawn the instance on the hypervisor. [ 704.443605] env[69927]: DEBUG nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 704.445290] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8594d8a-3938-445f-b039-3b163711923f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.476900] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095487, 'name': ReconfigVM_Task, 'duration_secs': 0.834899} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.477890] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85/6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 704.478631] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-348509e0-8a95-4548-8f7e-c4b1a3024dfb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.495019] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 704.495019] env[69927]: value = "task-4095489" [ 704.495019] env[69927]: _type = "Task" [ 704.495019] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.499167] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095488, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.522380] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095489, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.753234] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Releasing lock "refresh_cache-c45d2259-2a05-49d5-81eb-4c79ced83121" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.753974] env[69927]: DEBUG nova.compute.manager [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 704.754555] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.756187] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0b832c-d841-4d4e-b806-8284a4cfdf77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.766881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 704.771514] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15992914-c2a7-46d4-95ef-b2eb1fcb698c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.782823] env[69927]: DEBUG oslo_vmware.api [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: (returnval){ [ 704.782823] env[69927]: value = "task-4095490" [ 704.782823] env[69927]: _type = "Task" [ 704.782823] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.790279] env[69927]: DEBUG nova.network.neutron [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Updated VIF entry in instance network info cache for port 9a088536-28fd-4645-87ba-e071c754db1e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 704.790664] env[69927]: DEBUG nova.network.neutron [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Updating instance_info_cache with network_info: [{"id": "9a088536-28fd-4645-87ba-e071c754db1e", "address": "fa:16:3e:ad:f4:34", "network": {"id": "187adf43-df44-41b7-8dc1-b541b4f84fee", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1475247123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b414ce037734749949fe7951f9ffd1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a088536-28", "ovs_interfaceid": "9a088536-28fd-4645-87ba-e071c754db1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.797240] env[69927]: DEBUG oslo_vmware.api [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095490, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.875282] env[69927]: DEBUG nova.network.neutron [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Updating instance_info_cache with network_info: [{"id": "e4473f09-99e5-4ac3-b9cc-3316e06abd65", "address": "fa:16:3e:81:8d:c6", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4473f09-99", "ovs_interfaceid": "e4473f09-99e5-4ac3-b9cc-3316e06abd65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.899181] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f5d18b-3134-48c6-89b6-4c08af0489bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.911502] env[69927]: DEBUG nova.network.neutron [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Successfully created port: 5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.914921] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40ca1ac-0936-44d7-afcd-f18cae8f5848 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.959812] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6cb8e3-f00c-466a-8803-0db3291ed35b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.962927] env[69927]: DEBUG nova.compute.manager [req-26901afe-8b68-4d16-b1e1-9e44a3192cfa req-74042f22-52fe-4cfc-9c8d-d5cdb546c284 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Received event network-vif-plugged-e4473f09-99e5-4ac3-b9cc-3316e06abd65 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 704.964729] env[69927]: DEBUG oslo_concurrency.lockutils [req-26901afe-8b68-4d16-b1e1-9e44a3192cfa req-74042f22-52fe-4cfc-9c8d-d5cdb546c284 service nova] Acquiring lock "128d0705-21a0-4103-ae84-85bbac7e718b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.964729] env[69927]: DEBUG oslo_concurrency.lockutils [req-26901afe-8b68-4d16-b1e1-9e44a3192cfa req-74042f22-52fe-4cfc-9c8d-d5cdb546c284 service nova] Lock "128d0705-21a0-4103-ae84-85bbac7e718b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.964729] env[69927]: DEBUG oslo_concurrency.lockutils [req-26901afe-8b68-4d16-b1e1-9e44a3192cfa req-74042f22-52fe-4cfc-9c8d-d5cdb546c284 service nova] Lock "128d0705-21a0-4103-ae84-85bbac7e718b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.964729] env[69927]: DEBUG nova.compute.manager [req-26901afe-8b68-4d16-b1e1-9e44a3192cfa req-74042f22-52fe-4cfc-9c8d-d5cdb546c284 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] No waiting events found dispatching network-vif-plugged-e4473f09-99e5-4ac3-b9cc-3316e06abd65 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 704.964729] env[69927]: WARNING nova.compute.manager [req-26901afe-8b68-4d16-b1e1-9e44a3192cfa req-74042f22-52fe-4cfc-9c8d-d5cdb546c284 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Received unexpected event network-vif-plugged-e4473f09-99e5-4ac3-b9cc-3316e06abd65 for instance with vm_state building and task_state spawning. [ 704.974955] env[69927]: INFO nova.compute.manager [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Took 25.37 seconds to build instance. [ 704.976407] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d967610c-151e-4308-89e0-2ad5e04d06db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.993674] env[69927]: DEBUG nova.compute.provider_tree [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.012475] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095488, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.016442] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095489, 'name': Rename_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.066993] env[69927]: DEBUG nova.network.neutron [-] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.301279] env[69927]: DEBUG oslo_concurrency.lockutils [req-03e7d9ff-827d-4d15-92f5-d904a5eba5eb req-11fd1873-7ef6-44f9-965e-89fc42f1e992 service nova] Releasing lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.302107] env[69927]: DEBUG oslo_vmware.api [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095490, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.347200] env[69927]: DEBUG nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 705.380159] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 705.380159] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.380159] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 705.386313] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.386313] env[69927]: DEBUG nova.virt.hardware [None 
req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.386313] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 705.386313] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 705.386313] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 705.389195] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 705.389195] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 705.389195] env[69927]: DEBUG nova.virt.hardware [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 705.389195] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "refresh_cache-128d0705-21a0-4103-ae84-85bbac7e718b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.389807] env[69927]: DEBUG nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Instance network_info: |[{"id": "e4473f09-99e5-4ac3-b9cc-3316e06abd65", "address": "fa:16:3e:81:8d:c6", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4473f09-99", "ovs_interfaceid": "e4473f09-99e5-4ac3-b9cc-3316e06abd65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 705.389807] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2164c892-431e-4784-b29b-db76377d2db0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.391835] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:8d:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4473f09-99e5-4ac3-b9cc-3316e06abd65', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.402880] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating folder: Project (c2fb1fc4c3ae41a5b331c6be7973eb72). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.403299] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4eae8a45-d77f-4819-9911-4a191a5c80cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.416555] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3413f4fa-e800-4c4c-8b48-9f6ae9f00dac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.422573] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created folder: Project (c2fb1fc4c3ae41a5b331c6be7973eb72) in parent group-v811283. [ 705.422573] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating folder: Instances. Parent ref: group-v811332. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.423604] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-219b5fca-8584-4ca4-a397-518aecdd1719 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.450417] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created folder: Instances in parent group-v811332. [ 705.450770] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.450982] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.451214] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecaa6eae-f630-4476-bd33-4e3d549b7d86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.476424] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.476424] env[69927]: value = "task-4095493" [ 705.476424] env[69927]: _type = "Task" [ 705.476424] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.481134] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72cf7643-f180-4065-8182-082f74461e7a tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.275s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.494067] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095493, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.498191] env[69927]: DEBUG nova.scheduler.client.report [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.515616] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095488, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.846405} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.517861] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8442f144-2be4-4634-b151-62f049a975b6/8442f144-2be4-4634-b151-62f049a975b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 705.517861] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 705.518171] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a5941c0-cd66-4782-b7ee-291e819df6a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.523929] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095489, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.530832] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 705.530832] env[69927]: value = "task-4095494" [ 705.530832] env[69927]: _type = "Task" [ 705.530832] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.541331] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095494, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.571993] env[69927]: INFO nova.compute.manager [-] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Took 1.92 seconds to deallocate network for instance. [ 705.798220] env[69927]: DEBUG oslo_vmware.api [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095490, 'name': PowerOffVM_Task, 'duration_secs': 0.674797} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.798639] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 705.799468] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 705.799468] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd89de5d-412f-484a-9adf-8fbaa9a0a798 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.827226] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 705.827930] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 705.827930] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Deleting the datastore file [datastore1] c45d2259-2a05-49d5-81eb-4c79ced83121 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 705.828057] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-591ab2a4-d4d3-4f35-9a58-69c37be41858 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.838098] env[69927]: DEBUG oslo_vmware.api [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for the task: 
(returnval){ [ 705.838098] env[69927]: value = "task-4095496" [ 705.838098] env[69927]: _type = "Task" [ 705.838098] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.849662] env[69927]: DEBUG oslo_vmware.api [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095496, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.915600] env[69927]: DEBUG nova.compute.manager [None req-d17e826c-cc2b-4530-bde2-1f8f0f0d7bd9 tempest-ServerDiagnosticsTest-394122660 tempest-ServerDiagnosticsTest-394122660-project-admin] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 705.917549] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9335a2fb-af09-4191-9765-b1ac1cd1d3c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.928614] env[69927]: INFO nova.compute.manager [None req-d17e826c-cc2b-4530-bde2-1f8f0f0d7bd9 tempest-ServerDiagnosticsTest-394122660 tempest-ServerDiagnosticsTest-394122660-project-admin] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Retrieving diagnostics [ 705.931043] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de68f383-0d92-4803-b453-46ad6a139274 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.990996] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095493, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.995472] env[69927]: DEBUG nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 706.012013] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.012638] env[69927]: DEBUG nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 706.016013] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.607s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.016013] env[69927]: DEBUG nova.objects.instance [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Lazy-loading 'resources' on Instance uuid 053f6f00-a818-473b-a887-4ec45174c1d5 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 706.024328] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095489, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.046617] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095494, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10462} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.048138] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 706.053066] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca3481b-2370-4bae-8d45-2e384270ca5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.079893] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 8442f144-2be4-4634-b151-62f049a975b6/8442f144-2be4-4634-b151-62f049a975b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 706.081347] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.081560] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d25b70d-58b8-4bde-9030-662919639e37 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.104584] env[69927]: 
DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 706.104584] env[69927]: value = "task-4095497" [ 706.104584] env[69927]: _type = "Task" [ 706.104584] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.117695] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095497, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.190475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9d83dda3-5fb1-416d-9307-faeef454efec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.190475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9d83dda3-5fb1-416d-9307-faeef454efec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.353571] env[69927]: DEBUG oslo_vmware.api [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Task: {'id': task-4095496, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110522} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.353571] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 706.353571] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 706.353571] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.353571] env[69927]: INFO nova.compute.manager [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Took 1.60 seconds to destroy the instance on the hypervisor. 
[ 706.354112] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.354112] env[69927]: DEBUG nova.compute.manager [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 706.354112] env[69927]: DEBUG nova.network.neutron [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.373508] env[69927]: DEBUG nova.network.neutron [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.492629] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.493251] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.504480] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095493, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.517905] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095489, 'name': Rename_Task, 'duration_secs': 2.008682} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.519314] env[69927]: DEBUG nova.compute.utils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 706.524692] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.524692] env[69927]: DEBUG nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 706.524692] env[69927]: DEBUG nova.network.neutron [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 706.526343] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f3873dd-3905-485f-9a26-a64a0dc0b229 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.531231] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.537089] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 706.537089] env[69927]: value = "task-4095498" [ 706.537089] env[69927]: _type = "Task" [ 706.537089] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.548477] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095498, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.596227] env[69927]: DEBUG nova.policy [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '068ad151032b4d939579825bdfffabcd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b875cf10c4074fe5b9e6497c5e823fa9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 706.619260] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095497, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.881583] env[69927]: DEBUG nova.network.neutron [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.998412] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095493, 'name': CreateVM_Task, 'duration_secs': 1.432673} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.001996] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.002917] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.003123] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.003484] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.003801] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8226e235-e3b1-4cb7-ba12-5736c7e2fdc7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.013069] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 
tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 707.013069] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e0062-33c9-f28e-a9d6-36e570344e45" [ 707.013069] env[69927]: _type = "Task" [ 707.013069] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.031014] env[69927]: DEBUG nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 707.034484] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e0062-33c9-f28e-a9d6-36e570344e45, 'name': SearchDatastore_Task, 'duration_secs': 0.012262} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.034925] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.035241] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.037294] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.037294] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.037294] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.037294] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-daf83039-fee6-45cd-927e-e5e28d9fd058 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.055676] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d 
tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095498, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.057188] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.060952] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.062114] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-063987d7-0304-441d-9fee-b7b57623810c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.070219] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 707.070219] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522677d0-f0c4-17d2-7f8b-76e66289d404" [ 707.070219] env[69927]: _type = "Task" [ 707.070219] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.085571] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522677d0-f0c4-17d2-7f8b-76e66289d404, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.107224] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f00d82-4a67-416f-91ee-f6fedfaf7419 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.133209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a415597d-e4fc-4629-9197-c8e4b0c83d52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.141075] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095497, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.183929] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5592ba7d-e21d-46ce-b909-c3dfd6895184 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.196056] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743fcbce-7b0c-429d-90ea-0e36a93ac72c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.217024] env[69927]: DEBUG nova.compute.provider_tree [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.296212] env[69927]: DEBUG nova.network.neutron [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Successfully updated port: 5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 707.384755] env[69927]: INFO nova.compute.manager [-] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Took 1.03 seconds to deallocate network for instance. [ 707.443502] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "bf4bee47-36ce-43ee-96f1-96f262882986" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.443839] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "bf4bee47-36ce-43ee-96f1-96f262882986" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.496933] env[69927]: DEBUG nova.network.neutron [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Successfully created port: 043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 707.538421] env[69927]: INFO nova.virt.block_device [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Booting with volume fc20d8d5-9567-4a28-a803-7b54361ba124 at /dev/sda [ 707.566230] env[69927]: DEBUG oslo_vmware.api [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095498, 'name': PowerOnVM_Task, 'duration_secs': 0.67681} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.566745] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.567014] env[69927]: INFO nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Took 12.34 seconds to spawn the instance on the hypervisor. [ 707.567495] env[69927]: DEBUG nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 707.569641] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c581989e-283c-4857-b2e5-03ce76cf6750 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.589234] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522677d0-f0c4-17d2-7f8b-76e66289d404, 'name': SearchDatastore_Task, 'duration_secs': 0.015676} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.591566] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc937b7c-4ddc-4bbb-9657-e12b29e8c4d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.602743] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 707.602743] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529f24bc-7f04-3370-1090-7437e60deaaa" [ 707.602743] env[69927]: _type = "Task" [ 707.602743] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.613169] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-966d1842-fcc7-416b-8141-b66b3495d8dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.625089] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529f24bc-7f04-3370-1090-7437e60deaaa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.625468] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095497, 'name': ReconfigVM_Task, 'duration_secs': 1.099639} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.627110] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 8442f144-2be4-4634-b151-62f049a975b6/8442f144-2be4-4634-b151-62f049a975b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.628397] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c35941f-e870-4bff-bf28-a13c6dedb1f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.641503] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786cac11-8fde-43c1-a7b6-661fd1e70318 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.662674] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 707.662674] env[69927]: value = "task-4095499" [ 707.662674] env[69927]: _type = "Task" [ 707.662674] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.677230] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095499, 'name': Rename_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.692716] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad94fd04-216f-46c9-8209-b86279b2c94b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.705036] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffdd32a-e6a2-4de4-b95b-e1d66f5c243a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.721662] env[69927]: DEBUG nova.scheduler.client.report [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.751426] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09506592-6c19-4b9f-a1b8-ecbf08e906d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.763195] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372cac74-d52b-44d7-ad13-c9b26dfde11d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.781941] env[69927]: DEBUG nova.virt.block_device [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Updating existing volume attachment record: db5d6acc-2c0f-419f-bb38-c0499201bf6e {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 707.800783] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.800915] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.801113] env[69927]: DEBUG nova.network.neutron [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.893189] env[69927]: 
DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.099225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "8edafb98-331a-45b8-8de8-4ba04b035ffd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.099844] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.100143] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "8edafb98-331a-45b8-8de8-4ba04b035ffd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.100535] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.100873] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.105140] env[69927]: INFO nova.compute.manager [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Took 26.90 seconds to build instance. [ 708.119533] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529f24bc-7f04-3370-1090-7437e60deaaa, 'name': SearchDatastore_Task, 'duration_secs': 0.019613} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.120516] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.120860] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 128d0705-21a0-4103-ae84-85bbac7e718b/128d0705-21a0-4103-ae84-85bbac7e718b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.121555] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2294d56-3548-4de5-8ba9-eaea438c262f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.137389] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 708.137389] env[69927]: value = "task-4095500" [ 708.137389] env[69927]: _type = "Task" [ 708.137389] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.138574] env[69927]: INFO nova.compute.manager [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Terminating instance [ 708.155110] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.176414] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095499, 'name': Rename_Task, 'duration_secs': 0.3262} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.176733] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.176991] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed7d53b1-855c-4ebb-8a7e-df826263e022 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.187102] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 708.187102] env[69927]: value = "task-4095501" [ 708.187102] env[69927]: _type = "Task" [ 708.187102] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.198191] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.227722] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.230306] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.102s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.231367] env[69927]: DEBUG nova.objects.instance [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lazy-loading 'resources' on Instance uuid c3c36508-96e1-4e75-931b-c7f0740b74e1 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 708.260821] env[69927]: INFO nova.scheduler.client.report [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Deleted allocations for instance 053f6f00-a818-473b-a887-4ec45174c1d5 [ 708.307112] env[69927]: DEBUG nova.compute.manager [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Received event network-changed-e4473f09-99e5-4ac3-b9cc-3316e06abd65 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 708.307395] env[69927]: DEBUG nova.compute.manager [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 
req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Refreshing instance network info cache due to event network-changed-e4473f09-99e5-4ac3-b9cc-3316e06abd65. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 708.307631] env[69927]: DEBUG oslo_concurrency.lockutils [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] Acquiring lock "refresh_cache-128d0705-21a0-4103-ae84-85bbac7e718b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.307770] env[69927]: DEBUG oslo_concurrency.lockutils [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] Acquired lock "refresh_cache-128d0705-21a0-4103-ae84-85bbac7e718b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.307920] env[69927]: DEBUG nova.network.neutron [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Refreshing network info cache for port e4473f09-99e5-4ac3-b9cc-3316e06abd65 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.432774] env[69927]: DEBUG nova.network.neutron [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.612311] env[69927]: DEBUG oslo_concurrency.lockutils [None req-07314485-8dfe-469d-ae2e-d5ef0ec41c3d tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.896s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.647868] env[69927]: DEBUG nova.compute.manager [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 708.648251] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 708.652087] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664b6069-df28-425c-abd5-5ec81e7b4c92 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.675719] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095500, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.678665] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 708.679080] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2907c55-f0a1-423b-a01f-05c893e03b41 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.687221] env[69927]: DEBUG oslo_vmware.api [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 708.687221] env[69927]: value = "task-4095502" [ 708.687221] env[69927]: _type = "Task" [ 708.687221] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.707165] env[69927]: DEBUG oslo_vmware.api [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095502, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.710611] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095501, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.773167] env[69927]: DEBUG oslo_concurrency.lockutils [None req-55d53a20-fb27-461e-8adb-b533a80f89d6 tempest-DeleteServersAdminTestJSON-1423119463 tempest-DeleteServersAdminTestJSON-1423119463-project-admin] Lock "053f6f00-a818-473b-a887-4ec45174c1d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.053s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.804062] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "c6a06550-33ed-4fee-bd37-3fce9c55b235" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.804763] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "c6a06550-33ed-4fee-bd37-3fce9c55b235" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.984950] env[69927]: DEBUG nova.network.neutron [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [{"id": "5615148b-36c9-40b6-9282-76bdcfb9931e", "address": "fa:16:3e:91:a6:13", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615148b-36", "ovs_interfaceid": "5615148b-36c9-40b6-9282-76bdcfb9931e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.116181] env[69927]: DEBUG nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.163214] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.839809} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.164066] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 128d0705-21a0-4103-ae84-85bbac7e718b/128d0705-21a0-4103-ae84-85bbac7e718b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.164418] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.164800] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6077e14-73a3-486e-b80b-a8efe552902d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.176479] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 709.176479] env[69927]: value = "task-4095503" [ 709.176479] env[69927]: _type = "Task" [ 709.176479] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.203825] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095503, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.215969] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095501, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.216234] env[69927]: DEBUG oslo_vmware.api [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095502, 'name': PowerOffVM_Task, 'duration_secs': 0.390021} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.216442] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 709.216679] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 709.216884] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05bf385f-f505-4ef9-a03d-fd07056ad961 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.333026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 709.333026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 709.333026] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Deleting the datastore file [datastore1] 8edafb98-331a-45b8-8de8-4ba04b035ffd {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 709.333026] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e7fc0a3-b16d-48ca-9132-e29c900635d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.338835] env[69927]: DEBUG oslo_vmware.api [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for the task: (returnval){ [ 709.338835] env[69927]: value = "task-4095505" [ 709.338835] env[69927]: _type = "Task" [ 709.338835] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.353128] env[69927]: DEBUG oslo_vmware.api [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095505, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.399346] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c4edda-15c7-435a-b347-e476734f79ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.408538] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd39b8a-24c1-442b-a902-dd6040dba486 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.446499] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9d76b4-ac27-4633-a124-f963299fd56f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.456126] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cb1cdc-5cde-4a8d-9061-0b29ab056ce9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.470777] env[69927]: DEBUG nova.compute.provider_tree [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.488100] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Releasing lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.488425] env[69927]: DEBUG nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Instance network_info: |[{"id": "5615148b-36c9-40b6-9282-76bdcfb9931e", "address": "fa:16:3e:91:a6:13", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615148b-36", "ovs_interfaceid": "5615148b-36c9-40b6-9282-76bdcfb9931e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 709.488876] 
env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:a6:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5615148b-36c9-40b6-9282-76bdcfb9931e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 709.498277] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Creating folder: Project (d28bd5c5feee4a39b76694d57eb3aaf0). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.498789] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b018f76-123d-4703-b9a5-76fcea450437 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.510682] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Created folder: Project (d28bd5c5feee4a39b76694d57eb3aaf0) in parent group-v811283. [ 709.511481] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Creating folder: Instances. Parent ref: group-v811335. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.511481] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6008018-4738-4f75-9454-e4991c4a5cc7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.524980] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Created folder: Instances in parent group-v811335. [ 709.525268] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 709.525472] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 709.525686] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9ca071f-25c6-40a8-88d6-5ec876cb6ad2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.549020] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.549020] env[69927]: value = "task-4095508" [ 709.549020] env[69927]: _type = "Task" [ 709.549020] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.557059] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095508, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.558597] env[69927]: DEBUG nova.network.neutron [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Updated VIF entry in instance network info cache for port e4473f09-99e5-4ac3-b9cc-3316e06abd65. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.558946] env[69927]: DEBUG nova.network.neutron [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Updating instance_info_cache with network_info: [{"id": "e4473f09-99e5-4ac3-b9cc-3316e06abd65", "address": "fa:16:3e:81:8d:c6", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4473f09-99", "ovs_interfaceid": "e4473f09-99e5-4ac3-b9cc-3316e06abd65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.646458] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.690335] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b 
tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095503, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087391} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.690335] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 709.690335] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d22f2a2-3f64-4b50-988d-dfa7b82e20ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.718594] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 128d0705-21a0-4103-ae84-85bbac7e718b/128d0705-21a0-4103-ae84-85bbac7e718b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 709.721956] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e339bde-689f-4fda-8a76-1812a2b1979e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.738813] env[69927]: DEBUG oslo_vmware.api [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4095501, 'name': PowerOnVM_Task, 'duration_secs': 1.104173} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.739654] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.739936] env[69927]: INFO nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Took 11.90 seconds to spawn the instance on the hypervisor. 
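The records above repeat one pattern: a VirtualDiskManager or VirtualMachine task is submitted, wait_for_task logs the returned task handle ("Waiting for the task: (returnval){ value = ... _type = "Task" }"), _poll_task reports progress percentages, and the final poll logs duration_secs once the task completes successfully. Below is a minimal, self-contained sketch of that poll-until-done loop; FakeTask, its poll() method, and the poll_interval default are illustrative stand-ins for this note only, not oslo.vmware's actual task objects or signatures.

    import time

    # Illustrative stand-in: real tasks here come from the vCenter SOAP API via
    # oslo.vmware; FakeTask just mimics a task that reports incremental progress.
    class FakeTask:
        def __init__(self, steps=3):
            self._steps = steps
            self._done = 0

        def poll(self):
            """Return (progress_percent, finished)."""
            self._done = min(self._done + 1, self._steps)
            return int(100 * self._done / self._steps), self._done >= self._steps


    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it reports completion, like the progress lines above."""
        started = time.monotonic()
        while True:
            progress, finished = task.poll()
            print(f"progress is {progress}%")   # mirrors the _poll_task progress records
            if finished:
                return time.monotonic() - started
            time.sleep(poll_interval)


    if __name__ == "__main__":
        duration = wait_for_task(FakeTask())
        print(f"completed successfully in {duration:.3f}s")

In the log, the equivalent loop runs inside the service itself, which is why each wait shows up as a "Waiting for the task" record followed by progress records from api.py:434 and a completion record (with duration_secs) from api.py:444.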
[ 709.740174] env[69927]: DEBUG nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 709.741830] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a983df34-4119-49d4-af3e-be5cbcb2df77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.747877] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 709.747877] env[69927]: value = "task-4095509" [ 709.747877] env[69927]: _type = "Task" [ 709.747877] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.764745] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095509, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.853727] env[69927]: DEBUG oslo_vmware.api [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095505, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.974659] env[69927]: DEBUG nova.scheduler.client.report [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.061600] env[69927]: DEBUG oslo_concurrency.lockutils [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] Releasing lock "refresh_cache-128d0705-21a0-4103-ae84-85bbac7e718b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.061812] env[69927]: DEBUG nova.compute.manager [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Received event network-vif-deleted-6c93c813-14c8-4f76-918b-6efb59510588 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 710.062080] env[69927]: DEBUG nova.compute.manager [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received event network-vif-plugged-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 710.062353] env[69927]: DEBUG oslo_concurrency.lockutils [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] Acquiring lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.062727] env[69927]: DEBUG oslo_concurrency.lockutils [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.063321] env[69927]: DEBUG oslo_concurrency.lockutils [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.063321] env[69927]: DEBUG nova.compute.manager [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] No waiting events found dispatching network-vif-plugged-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 710.063501] env[69927]: WARNING nova.compute.manager [req-eb27f5f6-92ac-41f5-a061-7d6981412e95 req-43f538e4-49b5-473d-b202-c45c8e586b57 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received unexpected event network-vif-plugged-5615148b-36c9-40b6-9282-76bdcfb9931e for instance with vm_state building and task_state spawning. [ 710.063767] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095508, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.103902] env[69927]: DEBUG nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 710.103902] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 710.104339] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.104423] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 710.105293] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.105293] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 710.106365] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 710.106365] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 710.106365] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 710.106631] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] 
Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 710.107576] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 710.107576] env[69927]: DEBUG nova.virt.hardware [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 710.110031] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6566ccc-0bc4-4d4c-b710-2788ed782d60 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.123194] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257b152a-93e5-4f78-ba61-0139329aa3c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.215688] env[69927]: DEBUG nova.network.neutron [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Successfully updated port: 043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 710.271055] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095509, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.272744] env[69927]: INFO nova.compute.manager [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Took 29.00 seconds to build instance. [ 710.355912] env[69927]: DEBUG oslo_vmware.api [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Task: {'id': task-4095505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.636113} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.357040] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 710.357040] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 710.357040] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.357040] env[69927]: INFO nova.compute.manager [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Took 1.71 seconds to destroy the instance on the hypervisor. [ 710.357369] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 710.357369] env[69927]: DEBUG nova.compute.manager [-] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 710.357472] env[69927]: DEBUG nova.network.neutron [-] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 710.489742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.260s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.493865] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.499s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.494137] env[69927]: DEBUG nova.objects.instance [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lazy-loading 'resources' on Instance uuid 4ad26720-ed24-4963-9519-3345dbfeb9a2 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 710.523285] env[69927]: INFO nova.scheduler.client.report [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Deleted allocations for instance c3c36508-96e1-4e75-931b-c7f0740b74e1 [ 710.562555] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095508, 'name': CreateVM_Task, 'duration_secs': 0.606394} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.567978] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 710.567978] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.567978] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.567978] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 710.567978] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31e110a4-1943-4937-b47e-e8674060b2fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.574401] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 710.574401] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dacbb8-4cbb-4fd5-5b7b-e015166f8b4f" [ 710.574401] env[69927]: _type = "Task" [ 710.574401] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.585935] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dacbb8-4cbb-4fd5-5b7b-e015166f8b4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.724650] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Acquiring lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.724650] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Acquired lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.724650] env[69927]: DEBUG nova.network.neutron [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.765032] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095509, 'name': ReconfigVM_Task, 'duration_secs': 0.540533} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.765032] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 128d0705-21a0-4103-ae84-85bbac7e718b/128d0705-21a0-4103-ae84-85bbac7e718b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.765296] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e56ca0a-1e29-4a51-9131-4c14a78f2b04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.773263] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 710.773263] env[69927]: value = "task-4095510" [ 710.773263] env[69927]: _type = "Task" [ 710.773263] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.773725] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84dc20c4-ebcd-47df-be3c-8938be877144 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "8442f144-2be4-4634-b151-62f049a975b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.881s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.786749] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095510, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.037049] env[69927]: DEBUG oslo_concurrency.lockutils [None req-66ef7af8-202a-45b6-9ea7-fa3702297090 tempest-ImagesNegativeTestJSON-1748246780 tempest-ImagesNegativeTestJSON-1748246780-project-member] Lock "c3c36508-96e1-4e75-931b-c7f0740b74e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.006s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.094255] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dacbb8-4cbb-4fd5-5b7b-e015166f8b4f, 'name': SearchDatastore_Task, 'duration_secs': 0.02033} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.097399] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.098233] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.098725] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.098915] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.099149] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.100420] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ebfe197-8717-4277-a015-77d2c606d6ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.118717] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.120551] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.120551] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecfb56f8-485c-4e86-bb81-98c57bd9b36f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.129841] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 711.129841] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d341d3-6f00-931f-f0d1-877e9c43670e" [ 711.129841] env[69927]: _type = "Task" [ 711.129841] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.150997] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d341d3-6f00-931f-f0d1-877e9c43670e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.189185] env[69927]: DEBUG nova.compute.manager [req-09b7f639-a166-43e0-963a-e3f8efc61aa9 req-d39e720c-b9a5-4238-9fbd-4722265c1829 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Received event network-vif-plugged-043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 711.189397] env[69927]: DEBUG oslo_concurrency.lockutils [req-09b7f639-a166-43e0-963a-e3f8efc61aa9 req-d39e720c-b9a5-4238-9fbd-4722265c1829 service nova] Acquiring lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.189635] env[69927]: DEBUG oslo_concurrency.lockutils [req-09b7f639-a166-43e0-963a-e3f8efc61aa9 req-d39e720c-b9a5-4238-9fbd-4722265c1829 service nova] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.189771] env[69927]: DEBUG oslo_concurrency.lockutils [req-09b7f639-a166-43e0-963a-e3f8efc61aa9 req-d39e720c-b9a5-4238-9fbd-4722265c1829 service nova] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.189904] env[69927]: DEBUG nova.compute.manager [req-09b7f639-a166-43e0-963a-e3f8efc61aa9 req-d39e720c-b9a5-4238-9fbd-4722265c1829 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] No waiting events found dispatching network-vif-plugged-043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 711.192123] env[69927]: WARNING nova.compute.manager [req-09b7f639-a166-43e0-963a-e3f8efc61aa9 req-d39e720c-b9a5-4238-9fbd-4722265c1829 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Received unexpected event 
network-vif-plugged-043de9ef-5509-4243-acb7-a19660d6d813 for instance with vm_state building and task_state spawning. [ 711.283392] env[69927]: DEBUG nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 711.286167] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095510, 'name': Rename_Task, 'duration_secs': 0.24721} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.287756] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.287756] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c864c048-37a4-4caa-9e99-1ec9dcffcd98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.301246] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 711.301246] env[69927]: value = "task-4095511" [ 711.301246] env[69927]: _type = "Task" [ 711.301246] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.313924] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095511, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.330248] env[69927]: DEBUG nova.network.neutron [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.605979] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65d733e-e474-46bf-823d-0c57e421d2d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.620960] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33bf011-eb14-4161-9ba8-e09e65e5ff99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.656611] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e983b053-fedc-42c0-a273-e940f064d2c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.670031] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709cdbd9-21b8-4119-867f-628202109cb5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.674250] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d341d3-6f00-931f-f0d1-877e9c43670e, 'name': SearchDatastore_Task, 'duration_secs': 0.027948} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.675446] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e27ed57-9a55-42bc-aae7-4e63db5304b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.686550] env[69927]: DEBUG nova.compute.provider_tree [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.692431] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 711.692431] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521c1079-822d-180a-978c-339f5254617b" [ 711.692431] env[69927]: _type = "Task" [ 711.692431] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.695017] env[69927]: DEBUG nova.network.neutron [-] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.705037] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521c1079-822d-180a-978c-339f5254617b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.817328] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095511, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.817551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.937245] env[69927]: DEBUG nova.network.neutron [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Updating instance_info_cache with network_info: [{"id": "043de9ef-5509-4243-acb7-a19660d6d813", "address": "fa:16:3e:21:e4:ce", "network": {"id": "590d3974-8666-4163-9279-c966932d74cb", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1006416743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b875cf10c4074fe5b9e6497c5e823fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bb10726-a946-47b9-b4b5-6916e3f14cc5", "external-id": "nsx-vlan-transportzone-609", "segmentation_id": 609, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043de9ef-55", "ovs_interfaceid": "043de9ef-5509-4243-acb7-a19660d6d813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.144594] env[69927]: DEBUG nova.compute.manager [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.144790] env[69927]: DEBUG nova.compute.manager [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing instance network info cache due to event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 712.145014] env[69927]: DEBUG oslo_concurrency.lockutils [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] Acquiring lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.147353] env[69927]: DEBUG oslo_concurrency.lockutils [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] Acquired lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.147353] env[69927]: DEBUG nova.network.neutron [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.192844] env[69927]: DEBUG nova.scheduler.client.report [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.198918] env[69927]: INFO nova.compute.manager [-] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Took 1.84 seconds to deallocate network for instance. [ 712.227807] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521c1079-822d-180a-978c-339f5254617b, 'name': SearchDatastore_Task, 'duration_secs': 0.012858} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.228793] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.229083] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5/a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 712.229360] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa558485-33b2-4059-a41a-ce30814ed170 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.240700] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 712.240700] env[69927]: value = "task-4095512" [ 712.240700] env[69927]: _type = "Task" [ 712.240700] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.252923] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.319034] env[69927]: DEBUG oslo_vmware.api [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095511, 'name': PowerOnVM_Task, 'duration_secs': 0.757178} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.319034] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 712.319034] env[69927]: INFO nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Took 11.75 seconds to spawn the instance on the hypervisor. 
[ 712.319034] env[69927]: DEBUG nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 712.319034] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022253a3-184d-4784-81c0-7cc6d4677020 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.366035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "c2b6b943-f6d6-427f-aba5-1d619d889325" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.366035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.444056] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Releasing lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.444056] env[69927]: DEBUG nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Instance network_info: |[{"id": "043de9ef-5509-4243-acb7-a19660d6d813", "address": "fa:16:3e:21:e4:ce", "network": {"id": "590d3974-8666-4163-9279-c966932d74cb", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1006416743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b875cf10c4074fe5b9e6497c5e823fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bb10726-a946-47b9-b4b5-6916e3f14cc5", "external-id": "nsx-vlan-transportzone-609", "segmentation_id": 609, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043de9ef-55", "ovs_interfaceid": "043de9ef-5509-4243-acb7-a19660d6d813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 712.444704] env[69927]: DEBUG nova.virt.vmwareapi.vmops 
[None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:e4:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bb10726-a946-47b9-b4b5-6916e3f14cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '043de9ef-5509-4243-acb7-a19660d6d813', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 712.458026] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Creating folder: Project (b875cf10c4074fe5b9e6497c5e823fa9). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.459469] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-439a6657-7445-4e5b-b406-6edc1e7434d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.476463] env[69927]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 712.476463] env[69927]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69927) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 712.478249] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Folder already exists: Project (b875cf10c4074fe5b9e6497c5e823fa9). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 712.478488] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Creating folder: Instances. Parent ref: group-v811287. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.478866] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cb42227-94e9-48ef-8f69-0e4ecbb384bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.494802] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Created folder: Instances in parent group-v811287. [ 712.494976] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 712.495203] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 712.495410] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e8f7717-744a-4968-90a9-13da29de434e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.520486] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 712.520486] env[69927]: value = "task-4095515" [ 712.520486] env[69927]: _type = "Task" [ 712.520486] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.532171] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095515, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.709581] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.215s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.716275] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 20.449s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.720730] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.758184] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095512, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.762204] env[69927]: INFO nova.scheduler.client.report [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Deleted allocations for instance 4ad26720-ed24-4963-9519-3345dbfeb9a2 [ 712.841256] env[69927]: INFO nova.compute.manager [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Took 31.48 seconds to build instance. [ 713.032889] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095515, 'name': CreateVM_Task, 'duration_secs': 0.486859} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.033074] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 713.035599] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'db5d6acc-2c0f-419f-bb38-c0499201bf6e', 'delete_on_termination': True, 'device_type': None, 'boot_index': 0, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811295', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'name': 'volume-fc20d8d5-9567-4a28-a803-7b54361ba124', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f67d6a0-e4b7-435e-8991-0f54e0379d22', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'serial': 'fc20d8d5-9567-4a28-a803-7b54361ba124'}, 'mount_device': '/dev/sda', 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69927) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 713.035599] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Root volume attach. Driver type: vmdk {{(pid=69927) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 713.035599] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c02cd0c-da8c-48e0-a25f-55a70cb9be11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.046168] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41853bb3-feac-4c83-af5b-c6904d3388b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.057195] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e0b3a7-4074-4d74-8682-6a1bbc46bc1d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.063705] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-65452838-9955-462a-bef0-bd7a930a5e85 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.072933] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 713.072933] env[69927]: value = "task-4095516" [ 713.072933] env[69927]: _type = "Task" [ 713.072933] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.082824] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.224511] env[69927]: INFO nova.compute.claims [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.255528] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095512, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.743406} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.256852] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5/a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 713.256852] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.257374] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2ab1b58-89c4-4634-8073-7db9128dcd91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.262500] env[69927]: DEBUG nova.network.neutron [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updated VIF entry in instance network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 713.263239] env[69927]: DEBUG nova.network.neutron [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [{"id": "5615148b-36c9-40b6-9282-76bdcfb9931e", "address": "fa:16:3e:91:a6:13", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615148b-36", "ovs_interfaceid": "5615148b-36c9-40b6-9282-76bdcfb9931e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.266682] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 713.266682] env[69927]: value = "task-4095517" [ 713.266682] env[69927]: _type = "Task" [ 713.266682] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.275352] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59899d96-783a-472d-ab55-86bb580bd7fb tempest-ServerDiagnosticsV248Test-165541197 tempest-ServerDiagnosticsV248Test-165541197-project-member] Lock "4ad26720-ed24-4963-9519-3345dbfeb9a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.931s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.280540] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095517, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.345701] env[69927]: DEBUG oslo_concurrency.lockutils [None req-24600340-58ce-4be4-8190-29a12fe9006b tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.608s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.375239] env[69927]: DEBUG nova.compute.manager [None req-fac54a38-a277-46fe-bdd9-0e606bb6a518 tempest-ServerExternalEventsTest-1475384843 tempest-ServerExternalEventsTest-1475384843-project] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Received event network-changed {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 713.375444] env[69927]: DEBUG nova.compute.manager [None req-fac54a38-a277-46fe-bdd9-0e606bb6a518 tempest-ServerExternalEventsTest-1475384843 tempest-ServerExternalEventsTest-1475384843-project] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Refreshing instance network info cache due to event network-changed. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 713.376639] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac54a38-a277-46fe-bdd9-0e606bb6a518 tempest-ServerExternalEventsTest-1475384843 tempest-ServerExternalEventsTest-1475384843-project] Acquiring lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.376639] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac54a38-a277-46fe-bdd9-0e606bb6a518 tempest-ServerExternalEventsTest-1475384843 tempest-ServerExternalEventsTest-1475384843-project] Acquired lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.376839] env[69927]: DEBUG nova.network.neutron [None req-fac54a38-a277-46fe-bdd9-0e606bb6a518 tempest-ServerExternalEventsTest-1475384843 tempest-ServerExternalEventsTest-1475384843-project] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.593224] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 34%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.731610] env[69927]: INFO nova.compute.resource_tracker [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating resource usage from migration 997e4693-06f5-485a-a436-c0324b8f738d [ 713.769467] env[69927]: DEBUG oslo_concurrency.lockutils [req-cc568309-791f-4e75-9c6c-6b54699096dc req-89202ba7-f5e2-4068-bef7-2a52cc5b27a9 service nova] Releasing lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.784790] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101514} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.784943] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.789244] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2aab93-002f-48e1-a134-751bfaded1ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.797739] env[69927]: DEBUG nova.compute.manager [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 713.812424] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3b6b15-003d-410b-a287-c5c6c8e44e8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.826348] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5/a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.829388] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d014b80b-a38f-4d74-be94-1316d7cbb437 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.850103] env[69927]: DEBUG nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 713.861033] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 713.861033] env[69927]: value = "task-4095518" [ 713.861033] env[69927]: _type = "Task" [ 713.861033] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.889083] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.121235] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 43%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.365184] env[69927]: INFO nova.compute.manager [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] instance snapshotting [ 714.372702] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1756c1fd-2b76-4472-8f31-12b57b58e9d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.406306] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.409145] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.409145] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e1c78c-a6bb-42ca-a451-3904df813442 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.422325] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8484a750-e63b-44f9-8ff4-1d56c7c69c96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.436390] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4380d37-a018-4a8c-a0b2-6e12692f14f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.471814] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84c21da-ccd8-4b4b-89d6-4d130a113b74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.480142] env[69927]: DEBUG nova.network.neutron [None req-fac54a38-a277-46fe-bdd9-0e606bb6a518 tempest-ServerExternalEventsTest-1475384843 tempest-ServerExternalEventsTest-1475384843-project] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Updating instance_info_cache with network_info: [{"id": "68ff56b2-b063-423a-93e6-b3aba5245461", "address": "fa:16:3e:a1:0e:04", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ff56b2-b0", "ovs_interfaceid": "68ff56b2-b063-423a-93e6-b3aba5245461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.483478] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c35f7da-6134-44c3-8ab4-a5c8c51f1e7f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.501098] env[69927]: DEBUG nova.compute.provider_tree [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 
tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.595898] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 54%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.883582] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.910048] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "cde9885b-1aa8-411d-847e-087fe375002b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.910048] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "cde9885b-1aa8-411d-847e-087fe375002b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.936872] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 714.937270] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-67e5dc57-e605-4f6d-ab61-389af579621e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.951271] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 714.951271] env[69927]: value = "task-4095519" [ 714.951271] env[69927]: _type = "Task" [ 714.951271] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.967414] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095519, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.983758] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac54a38-a277-46fe-bdd9-0e606bb6a518 tempest-ServerExternalEventsTest-1475384843 tempest-ServerExternalEventsTest-1475384843-project] Releasing lock "refresh_cache-6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.006600] env[69927]: DEBUG nova.scheduler.client.report [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.097329] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 67%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.277633] env[69927]: DEBUG nova.compute.manager [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Received event network-changed-043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 715.277860] env[69927]: DEBUG nova.compute.manager [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Refreshing instance network info cache due to event network-changed-043de9ef-5509-4243-acb7-a19660d6d813. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 715.280813] env[69927]: DEBUG oslo_concurrency.lockutils [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] Acquiring lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.280813] env[69927]: DEBUG oslo_concurrency.lockutils [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] Acquired lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.280813] env[69927]: DEBUG nova.network.neutron [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Refreshing network info cache for port 043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 715.383653] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.473927] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095519, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.515150] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.801s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.515376] env[69927]: INFO nova.compute.manager [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Migrating [ 715.515617] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.515765] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "compute-rpcapi-router" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.521037] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.899s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.522270] env[69927]: INFO nova.compute.claims [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.601338] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 81%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.746848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "128d0705-21a0-4103-ae84-85bbac7e718b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.747219] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.747426] env[69927]: DEBUG nova.compute.manager [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 715.748623] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbbc95b-c5a4-4888-808e-ed9455799ec0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.757330] env[69927]: DEBUG nova.compute.manager [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 715.758612] env[69927]: DEBUG nova.objects.instance [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lazy-loading 'flavor' on Instance uuid 128d0705-21a0-4103-ae84-85bbac7e718b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 715.885359] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.971200] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095519, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.022393] env[69927]: INFO nova.compute.rpcapi [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 716.023055] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "compute-rpcapi-router" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.069222] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.069589] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.098239] env[69927]: DEBUG nova.network.neutron [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Updated VIF entry in instance network info cache for port 043de9ef-5509-4243-acb7-a19660d6d813. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 716.098467] env[69927]: DEBUG nova.network.neutron [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Updating instance_info_cache with network_info: [{"id": "043de9ef-5509-4243-acb7-a19660d6d813", "address": "fa:16:3e:21:e4:ce", "network": {"id": "590d3974-8666-4163-9279-c966932d74cb", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1006416743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b875cf10c4074fe5b9e6497c5e823fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bb10726-a946-47b9-b4b5-6916e3f14cc5", "external-id": "nsx-vlan-transportzone-609", "segmentation_id": 609, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043de9ef-55", "ovs_interfaceid": "043de9ef-5509-4243-acb7-a19660d6d813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.104981] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.193125] env[69927]: DEBUG nova.compute.manager [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Received event network-changed-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.193349] env[69927]: DEBUG nova.compute.manager [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Refreshing instance network info cache due to event network-changed-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 716.193649] env[69927]: DEBUG oslo_concurrency.lockutils [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] Acquiring lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.193785] env[69927]: DEBUG oslo_concurrency.lockutils [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] Acquired lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.195849] env[69927]: DEBUG nova.network.neutron [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Refreshing network info cache for port 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.386131] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095518, 'name': ReconfigVM_Task, 'duration_secs': 2.490527} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.386131] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Reconfigured VM instance instance-00000011 to attach disk [datastore2] a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5/a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 716.386131] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15e5be74-34a3-4d2c-be56-20a6f824c231 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.393315] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 716.393315] env[69927]: value = "task-4095520" [ 716.393315] env[69927]: _type = "Task" [ 716.393315] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.405827] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095520, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.467139] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095519, 'name': CreateSnapshot_Task, 'duration_secs': 1.507317} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.467428] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 716.468209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d11aa10-592d-48bc-8098-5ac1b98a562a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.559025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.559025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.559025] env[69927]: DEBUG nova.network.neutron [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 716.600939] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.605603] env[69927]: DEBUG oslo_concurrency.lockutils [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] Releasing lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.605864] env[69927]: DEBUG nova.compute.manager [req-0a198910-e62e-4bec-b7e6-89b121c5334b req-09ee1009-1ba9-4ac0-a017-85d94af74fa4 service nova] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Received event network-vif-deleted-34f427e8-f150-431f-960f-584ea344457c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.632760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.632760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.632760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.632760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.633129] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.637784] env[69927]: INFO nova.compute.manager [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Terminating instance [ 716.765901] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 
tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 716.767081] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fdd5b23-7899-4194-aa81-4c11abad7d49 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.775180] env[69927]: DEBUG oslo_vmware.api [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 716.775180] env[69927]: value = "task-4095521" [ 716.775180] env[69927]: _type = "Task" [ 716.775180] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.784769] env[69927]: DEBUG oslo_vmware.api [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095521, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.835678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "0e6e60e7-d623-44da-912e-804da4d616c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.835974] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "0e6e60e7-d623-44da-912e-804da4d616c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.871336] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "915797c5-6f68-4355-a6b0-ad2b06b826cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.871587] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.913838] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095520, 'name': Rename_Task, 'duration_secs': 0.499918} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.913838] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 716.914333] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b34788b-1bed-4c05-8ed9-e949cd48f73f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.932625] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 716.932625] env[69927]: value = "task-4095522" [ 716.932625] env[69927]: _type = "Task" [ 716.932625] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.941214] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.988195] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 716.992010] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f92f2d24-4bca-47dc-8d1e-bee83aa7e8d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.005031] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 717.005031] env[69927]: value = "task-4095523" [ 717.005031] env[69927]: _type = "Task" [ 717.005031] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.016800] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095523, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.036888] env[69927]: DEBUG nova.network.neutron [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updated VIF entry in instance network info cache for port 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.037449] env[69927]: DEBUG nova.network.neutron [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updating instance_info_cache with network_info: [{"id": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "address": "fa:16:3e:46:5d:de", "network": {"id": "d34a6e93-05fd-40da-b452-332d47c34037", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1653403369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179addc4042d4c65b15c008132e74bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3d0fe6-11", "ovs_interfaceid": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.105839] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task} progress is 97%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.144591] env[69927]: DEBUG nova.compute.manager [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 717.145731] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 717.151618] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb75b06-32df-48b3-9ff4-0a4a2eebdb40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.164025] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 717.164192] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f3ecbcd-e245-44a1-abbc-25ae050ccf0a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.176822] env[69927]: DEBUG oslo_vmware.api [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 717.176822] env[69927]: value = "task-4095524" [ 717.176822] env[69927]: _type = "Task" [ 717.176822] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.193302] env[69927]: DEBUG oslo_vmware.api [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.203373] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d463ff-4d09-4993-b173-5e6806892861 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.225165] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b5f2fe-fa98-4da6-b283-754a99925240 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.270676] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fb5492-3415-4217-a785-bb910abd08be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.283508] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a8f2b2-7f46-4a06-a407-794a60154b6c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.296048] env[69927]: DEBUG oslo_vmware.api [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095521, 'name': PowerOffVM_Task, 'duration_secs': 0.29333} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.308209] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 717.308574] env[69927]: DEBUG nova.compute.manager [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 717.309274] env[69927]: DEBUG nova.compute.provider_tree [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.311438] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2b84cf-e4ed-4fb8-8a24-4a4c0f2ef72a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.371390] env[69927]: DEBUG nova.network.neutron [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance_info_cache with network_info: [{"id": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "address": "fa:16:3e:89:98:e2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c7df0a-bb", "ovs_interfaceid": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.442878] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095522, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.518249] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095523, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.543856] env[69927]: DEBUG oslo_concurrency.lockutils [req-7d6d0b85-80d1-45f1-a75c-6c06fdb626df req-39743c9c-70ad-4837-aef3-a4e1648f34ce service nova] Releasing lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.600121] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095516, 'name': RelocateVM_Task, 'duration_secs': 4.489369} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.600543] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 717.600660] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811295', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'name': 'volume-fc20d8d5-9567-4a28-a803-7b54361ba124', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f67d6a0-e4b7-435e-8991-0f54e0379d22', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'serial': 'fc20d8d5-9567-4a28-a803-7b54361ba124'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 717.601519] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dc8336-c5b8-4201-ba8a-ac74ff295565 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.618382] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17df6cc3-c3a6-4f50-82eb-df68238ac134 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.642556] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] volume-fc20d8d5-9567-4a28-a803-7b54361ba124/volume-fc20d8d5-9567-4a28-a803-7b54361ba124.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 
717.642879] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c43427e-e8cd-4ef7-aef0-aafb6e757b48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.666537] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 717.666537] env[69927]: value = "task-4095525" [ 717.666537] env[69927]: _type = "Task" [ 717.666537] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.675029] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095525, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.689237] env[69927]: DEBUG oslo_vmware.api [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095524, 'name': PowerOffVM_Task, 'duration_secs': 0.276255} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.689576] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 717.689806] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 717.690117] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb3423f9-de00-4e64-95a9-329ca18e2d61 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.758301] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 717.758674] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 717.759408] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Deleting the datastore file [datastore1] 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85 {{(pid=69927) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 717.759560] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83865723-d3c1-4293-a124-85fd08b47e02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.768626] env[69927]: DEBUG oslo_vmware.api [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for the task: (returnval){ [ 717.768626] env[69927]: value = "task-4095527" [ 717.768626] env[69927]: _type = "Task" [ 717.768626] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.776531] env[69927]: DEBUG oslo_vmware.api [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.816357] env[69927]: DEBUG nova.scheduler.client.report [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.826508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ccec716d-ecef-48a9-b923-61e19592fdff tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.078s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.876679] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.917355] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "256319c4-817d-4267-8531-a65f0f8cd0b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.918405] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.943668] env[69927]: DEBUG oslo_vmware.api [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095522, 'name': PowerOnVM_Task, 'duration_secs': 0.693044} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.944054] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 717.944287] env[69927]: INFO nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Took 12.60 seconds to spawn the instance on the hypervisor. [ 717.944554] env[69927]: DEBUG nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 717.945515] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f2801c-c67c-4cca-82e3-ba14bade1486 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.016028] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095523, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.177460] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095525, 'name': ReconfigVM_Task, 'duration_secs': 0.504519} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.177700] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Reconfigured VM instance instance-00000012 to attach disk [datastore1] volume-fc20d8d5-9567-4a28-a803-7b54361ba124/volume-fc20d8d5-9567-4a28-a803-7b54361ba124.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.183125] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-941045c7-b539-4ac3-8488-f0f97f7e2f40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.204394] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 718.204394] env[69927]: value = "task-4095528" [ 718.204394] env[69927]: _type = "Task" [ 718.204394] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.217174] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095528, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.277461] env[69927]: DEBUG oslo_vmware.api [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Task: {'id': task-4095527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168971} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.277730] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 718.277929] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 718.278388] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 718.278575] env[69927]: INFO nova.compute.manager [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 718.278828] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.279030] env[69927]: DEBUG nova.compute.manager [-] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 718.279133] env[69927]: DEBUG nova.network.neutron [-] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 718.324311] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.804s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.326709] env[69927]: DEBUG nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 718.328799] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.434s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.332918] env[69927]: INFO nova.compute.claims [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.468563] env[69927]: INFO nova.compute.manager [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Took 35.89 seconds to build instance. [ 718.525222] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095523, 'name': CloneVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.718986] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095528, 'name': ReconfigVM_Task, 'duration_secs': 0.278018} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.723347] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811295', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'name': 'volume-fc20d8d5-9567-4a28-a803-7b54361ba124', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f67d6a0-e4b7-435e-8991-0f54e0379d22', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'serial': 'fc20d8d5-9567-4a28-a803-7b54361ba124'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 718.724021] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12a93266-6a73-4669-9418-e63dfec25e4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.732795] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 718.732795] env[69927]: value = "task-4095529" [ 718.732795] env[69927]: _type = "Task" [ 718.732795] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.743934] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095529, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.838827] env[69927]: DEBUG nova.compute.utils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 718.840874] env[69927]: DEBUG nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.841100] env[69927]: DEBUG nova.network.neutron [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.875480] env[69927]: DEBUG nova.compute.manager [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Received event network-changed-9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 718.875619] env[69927]: DEBUG nova.compute.manager [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Refreshing instance network info cache due to event network-changed-9a088536-28fd-4645-87ba-e071c754db1e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 718.875811] env[69927]: DEBUG oslo_concurrency.lockutils [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] Acquiring lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.875975] env[69927]: DEBUG oslo_concurrency.lockutils [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] Acquired lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.876154] env[69927]: DEBUG nova.network.neutron [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Refreshing network info cache for port 9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 718.877667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "21b7b237-557e-4030-93bb-6b5ce417e53c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.877863] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.890342] env[69927]: DEBUG nova.policy [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47c1a434529b48d6b10e11d884d4e20f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'bd854419925b43cd8caaa303773ac093', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.974695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d51a75ba-ac50-4c1c-8f8a-62cc87a94395 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.323s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.020272] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095523, 'name': CloneVM_Task, 'duration_secs': 1.624438} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.020560] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Created linked-clone VM from snapshot [ 719.021388] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4ba5b3-13cc-47f7-8a69-93386e5f231c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.030566] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Uploading image d3f0871d-cf90-479c-afbc-39a5ec9421a4 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 719.058029] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 719.058029] env[69927]: value = "vm-811341" [ 719.058029] env[69927]: _type = "VirtualMachine" [ 719.058029] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 719.058029] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f1d228ac-94b1-4261-acd6-26d55a58122c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.068018] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lease: (returnval){ [ 719.068018] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fd4b92-0eda-6122-6f11-0d9066fa591e" [ 719.068018] env[69927]: _type = "HttpNfcLease" [ 719.068018] env[69927]: } obtained for exporting VM: (result){ [ 719.068018] env[69927]: value = "vm-811341" [ 719.068018] env[69927]: _type = "VirtualMachine" [ 719.068018] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 719.068018] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the lease: (returnval){ [ 719.068018] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fd4b92-0eda-6122-6f11-0d9066fa591e" [ 719.068018] env[69927]: _type = "HttpNfcLease" [ 719.068018] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 719.068018] env[69927]: DEBUG nova.network.neutron [-] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.074736] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 719.074736] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fd4b92-0eda-6122-6f11-0d9066fa591e" [ 719.074736] env[69927]: _type = "HttpNfcLease" [ 719.074736] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 719.123040] env[69927]: DEBUG nova.compute.manager [req-7cc947e5-4447-4b49-8e68-37a703df934c req-01b950de-b59c-402d-ac18-583a277861cd service nova] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Received event network-vif-deleted-68ff56b2-b063-423a-93e6-b3aba5245461 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.238616] env[69927]: DEBUG nova.network.neutron [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Successfully created port: 32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.244943] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095529, 'name': Rename_Task, 'duration_secs': 0.141704} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.244943] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.245169] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd140147-4b06-412e-a78f-6b2ba7753bb5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.254021] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 719.254021] env[69927]: value = "task-4095531" [ 719.254021] env[69927]: _type = "Task" [ 719.254021] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.264468] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.349603] env[69927]: DEBUG nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.394350] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32c8e8e-43ba-4dd3-9d71-dc362c0d02a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.416376] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance '9348e368-cc3c-4bde-91ae-26fd03ad536a' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 719.477053] env[69927]: DEBUG nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 719.572974] env[69927]: INFO nova.compute.manager [-] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Took 1.29 seconds to deallocate network for instance. [ 719.586789] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 719.586789] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fd4b92-0eda-6122-6f11-0d9066fa591e" [ 719.586789] env[69927]: _type = "HttpNfcLease" [ 719.586789] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 719.587322] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 719.587322] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fd4b92-0eda-6122-6f11-0d9066fa591e" [ 719.587322] env[69927]: _type = "HttpNfcLease" [ 719.587322] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 719.588144] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a91285-de72-4dbb-9ebe-7c1995ecfe92 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.599440] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb1044-ebf1-986a-3dab-2e797f3efefe/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 719.599736] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb1044-ebf1-986a-3dab-2e797f3efefe/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 719.719248] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e3b61585-704b-4107-9747-461b6b590033 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.777551] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095531, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.931086] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 719.933067] env[69927]: DEBUG nova.network.neutron [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Updated VIF entry in instance network info cache for port 9a088536-28fd-4645-87ba-e071c754db1e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 719.933660] env[69927]: DEBUG nova.network.neutron [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Updating instance_info_cache with network_info: [{"id": "9a088536-28fd-4645-87ba-e071c754db1e", "address": "fa:16:3e:ad:f4:34", "network": {"id": "187adf43-df44-41b7-8dc1-b541b4f84fee", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1475247123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b414ce037734749949fe7951f9ffd1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a088536-28", "ovs_interfaceid": "9a088536-28fd-4645-87ba-e071c754db1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.935415] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-713c8d50-f86e-4951-965b-379fc5ac3e2f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.945350] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 719.945350] env[69927]: value = "task-4095532" [ 719.945350] env[69927]: _type = "Task" [ 719.945350] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.963377] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095532, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.008136] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.063730] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19bdcc3-bb11-46b5-9c1d-685f4b669634 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.073708] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7991fabb-ff51-469b-8ed4-ad0a88faf390 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.108139] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.110032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026bdb60-486e-420d-9542-af7643e15886 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.120273] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb9913d-8343-4fd0-b573-6511fb540c98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.139551] env[69927]: DEBUG nova.compute.provider_tree [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.270682] env[69927]: DEBUG oslo_vmware.api [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095531, 'name': PowerOnVM_Task, 'duration_secs': 0.754432} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.271654] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.271916] env[69927]: INFO nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Took 10.17 seconds to spawn the instance on the hypervisor. 
[ 720.272120] env[69927]: DEBUG nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 720.273173] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1806b531-8e14-489b-84d5-4e5067d0243c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.364371] env[69927]: DEBUG nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 720.398300] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 720.398300] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.400195] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.400542] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.400849] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.403254] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 720.403254] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 720.403913] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 720.404337] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 720.404819] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 720.408473] env[69927]: DEBUG nova.virt.hardware [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 720.408473] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14587a76-f676-4eed-806f-591a17515524 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.418161] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76dcdc0-20ea-426e-aba8-b19b4cb2bda3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.439333] env[69927]: DEBUG oslo_concurrency.lockutils [req-09d98695-f862-44a4-bbc3-9ca245ad3356 req-52452cf5-c17e-4c9e-94eb-9f9bc4b679c6 service nova] Releasing lock "refresh_cache-8442f144-2be4-4634-b151-62f049a975b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.460267] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095532, 'name': PowerOffVM_Task, 'duration_secs': 0.322463} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.460756] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 720.460962] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance '9348e368-cc3c-4bde-91ae-26fd03ad536a' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 720.610738] env[69927]: DEBUG nova.compute.manager [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 720.611901] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9383397-492e-4ec7-aa9d-0249cd24ee19 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.645178] env[69927]: DEBUG nova.scheduler.client.report [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.797728] env[69927]: INFO nova.compute.manager [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Took 34.15 seconds to build instance. 
[ 720.976042] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 720.976042] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.976042] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.976042] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.976526] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.976526] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 720.976526] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 720.976526] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 720.976526] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 720.976844] env[69927]: DEBUG nova.virt.hardware [None 
req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 720.976844] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 720.986039] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8498ffb-e47d-40c1-ba51-2acff132aefe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.006466] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 721.006466] env[69927]: value = "task-4095533" [ 721.006466] env[69927]: _type = "Task" [ 721.006466] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.021882] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095533, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.125550] env[69927]: INFO nova.compute.manager [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] instance snapshotting [ 721.125550] env[69927]: WARNING nova.compute.manager [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 721.128673] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18cf544-6152-4e7d-b251-0183f1414c38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.153893] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.825s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.154149] env[69927]: DEBUG nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 721.158519] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.660s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.158519] env[69927]: DEBUG nova.objects.instance [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 721.160428] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29bff01-41af-44cc-92ff-268d8d2806b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.249592] env[69927]: DEBUG nova.network.neutron [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Successfully updated port: 32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.301043] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1f511d31-ee37-4135-8b1f-078960dd8544 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.343s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.517069] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095533, 'name': ReconfigVM_Task, 'duration_secs': 0.352097} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.517509] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance '9348e368-cc3c-4bde-91ae-26fd03ad536a' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 721.672358] env[69927]: DEBUG nova.compute.utils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 721.672358] env[69927]: DEBUG nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 721.672358] env[69927]: DEBUG nova.network.neutron [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 721.678060] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 721.679285] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-198170db-9580-46fe-a4ba-7a3712e4aff1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.688591] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 721.688591] env[69927]: value = "task-4095534" [ 721.688591] env[69927]: _type = "Task" [ 721.688591] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.699125] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095534, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.715199] env[69927]: DEBUG nova.policy [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd89d0a2232b4da1a0b88799062fe8da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3afde63c8cbe4aecb32a470fd6b948f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 721.758502] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.758502] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquired lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.758502] env[69927]: DEBUG nova.network.neutron [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] 
[instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.800034] env[69927]: DEBUG nova.compute.manager [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Received event network-changed-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.800289] env[69927]: DEBUG nova.compute.manager [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Refreshing instance network info cache due to event network-changed-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 721.800737] env[69927]: DEBUG oslo_concurrency.lockutils [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] Acquiring lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.800884] env[69927]: DEBUG oslo_concurrency.lockutils [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] Acquired lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.801149] env[69927]: DEBUG nova.network.neutron [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Refreshing network info cache for port 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.806221] env[69927]: DEBUG nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.027521] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 722.028474] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.028775] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 722.029711] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.029989] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 722.030228] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 722.030641] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 722.031184] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 722.031184] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 722.031184] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 722.031353] env[69927]: DEBUG nova.virt.hardware [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 722.037896] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Reconfiguring VM instance instance-00000006 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 722.040096] env[69927]: DEBUG nova.network.neutron [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Successfully created port: 85ea851a-ed88-452f-acd1-8e025fdeaeb0 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.042614] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-484234b7-0c8f-4f76-b0ca-b01573eecca3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.066523] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 722.066523] env[69927]: value = "task-4095535" [ 722.066523] env[69927]: _type = "Task" [ 722.066523] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.077258] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095535, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.172603] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45d2fec1-9eb3-4f6a-b16c-2dcc50b19a04 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.173720] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.026s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.178032] env[69927]: INFO nova.compute.claims [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.179654] env[69927]: DEBUG nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 722.208114] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095534, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.274211] env[69927]: DEBUG nova.compute.manager [req-6dcb90c1-fdf5-4318-b524-302246eeae77 req-fa28b8ea-01c4-49b7-8d12-9d1337b8b5cd service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Received event network-vif-plugged-32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 722.276169] env[69927]: DEBUG oslo_concurrency.lockutils [req-6dcb90c1-fdf5-4318-b524-302246eeae77 req-fa28b8ea-01c4-49b7-8d12-9d1337b8b5cd service nova] Acquiring lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.276169] env[69927]: DEBUG oslo_concurrency.lockutils [req-6dcb90c1-fdf5-4318-b524-302246eeae77 req-fa28b8ea-01c4-49b7-8d12-9d1337b8b5cd service nova] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.276169] env[69927]: DEBUG oslo_concurrency.lockutils [req-6dcb90c1-fdf5-4318-b524-302246eeae77 req-fa28b8ea-01c4-49b7-8d12-9d1337b8b5cd service nova] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.276169] env[69927]: DEBUG nova.compute.manager [req-6dcb90c1-fdf5-4318-b524-302246eeae77 req-fa28b8ea-01c4-49b7-8d12-9d1337b8b5cd service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] No waiting events found dispatching network-vif-plugged-32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 722.276954] env[69927]: WARNING nova.compute.manager [req-6dcb90c1-fdf5-4318-b524-302246eeae77 req-fa28b8ea-01c4-49b7-8d12-9d1337b8b5cd service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Received unexpected event network-vif-plugged-32ace01f-025d-4978-a510-c851c8daf246 for instance with vm_state building and task_state spawning. [ 722.317302] env[69927]: DEBUG nova.network.neutron [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.335952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.579523] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095535, 'name': ReconfigVM_Task, 'duration_secs': 0.209922} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.579523] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Reconfigured VM instance instance-00000006 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 722.580225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2360c472-4b6a-431f-94bd-1b9268a75d80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.608169] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 9348e368-cc3c-4bde-91ae-26fd03ad536a/9348e368-cc3c-4bde-91ae-26fd03ad536a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.614673] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3473e0f5-a02b-4e8d-8623-4d7b72963948 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.637609] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 722.637609] env[69927]: value = "task-4095536" [ 722.637609] env[69927]: _type = "Task" [ 722.637609] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.649553] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095536, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.682319] env[69927]: DEBUG nova.network.neutron [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Updating instance_info_cache with network_info: [{"id": "32ace01f-025d-4978-a510-c851c8daf246", "address": "fa:16:3e:0a:51:0d", "network": {"id": "31bda7f6-2b49-4aee-82dd-68e864f7587c", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-956592551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd854419925b43cd8caaa303773ac093", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ace01f-02", "ovs_interfaceid": "32ace01f-025d-4978-a510-c851c8daf246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.703604] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095534, 'name': CreateSnapshot_Task, 'duration_secs': 0.692374} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.703604] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 722.703604] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaf74fb-8cc3-4e6c-b4cc-99ff8be50d10 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.161230] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.161230] env[69927]: DEBUG nova.network.neutron [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updated VIF entry in instance network info cache for port 4e3d0fe6-1171-4bdd-bfab-86bb4f7af637. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.161903] env[69927]: DEBUG nova.network.neutron [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updating instance_info_cache with network_info: [{"id": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "address": "fa:16:3e:46:5d:de", "network": {"id": "d34a6e93-05fd-40da-b452-332d47c34037", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1653403369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179addc4042d4c65b15c008132e74bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3d0fe6-11", "ovs_interfaceid": "4e3d0fe6-1171-4bdd-bfab-86bb4f7af637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.187439] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Releasing lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.187439] env[69927]: DEBUG nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Instance network_info: |[{"id": "32ace01f-025d-4978-a510-c851c8daf246", "address": "fa:16:3e:0a:51:0d", "network": {"id": "31bda7f6-2b49-4aee-82dd-68e864f7587c", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-956592551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd854419925b43cd8caaa303773ac093", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ace01f-02", "ovs_interfaceid": "32ace01f-025d-4978-a510-c851c8daf246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
723.187890] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:51:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32ace01f-025d-4978-a510-c851c8daf246', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 723.197668] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Creating folder: Project (bd854419925b43cd8caaa303773ac093). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.201309] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db57b7ff-bf00-4969-afbd-b34eef9e7f57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.214900] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Created folder: Project (bd854419925b43cd8caaa303773ac093) in parent group-v811283. [ 723.215242] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Creating folder: Instances. Parent ref: group-v811343. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.215586] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92b482b8-583c-4ce7-826e-77d13f867523 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.221964] env[69927]: DEBUG nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 723.232397] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 723.239456] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9407bee7-caad-4ab0-8669-9e61d8c8d805 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.242542] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Created folder: Instances in parent group-v811343. 
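The records above show the vmwareapi driver creating the project and Instances folders, issuing CloneVM_Task / CreateVM_Task, and then polling each returned task until vCenter reports completion (the recurring "Waiting for the task ... to complete" and "progress is N%" lines). A minimal sketch of that invoke-and-wait pattern on top of oslo.vmware follows; it is not Nova's actual helper code, and the vCenter host, credentials, managed-object references and clone spec are placeholders:

    # Hedged sketch of the invoke-and-wait pattern seen in these records.
    # Host, credentials, morefs and clone_spec are illustrative assumptions;
    # in Nova they come from configuration and the vm_util/vmops helpers.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...      # vim.VirtualMachine moref, e.g. from an earlier search
    folder_ref = ...  # vim.Folder moref (the "Instances" folder created above)
    clone_spec = ...  # built with session.vim.client.factory in real code

    # Property reads correspond to the PropertyCollector.RetrievePropertiesEx calls.
    vm_name = session.invoke_api(vim_util, 'get_object_property',
                                 session.vim, vm_ref, 'name')

    # Long-running operations return a Task moref; wait_for_task() polls it,
    # which is what the "Task: {'id': task-...} progress is N%" lines reflect.
    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name='linked-clone',
                              spec=clone_spec)
    session.wait_for_task(task)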
[ 723.242861] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 723.243453] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 723.244231] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e199e994-2e63-4f7a-8b96-795175dab128 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.269278] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 723.269278] env[69927]: value = "task-4095539" [ 723.269278] env[69927]: _type = "Task" [ 723.269278] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.276014] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 723.276014] env[69927]: value = "task-4095540" [ 723.276014] env[69927]: _type = "Task" [ 723.276014] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.287323] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 723.287655] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.288245] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 723.288245] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 
{{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.288245] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 723.288937] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 723.288937] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 723.288937] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 723.289139] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 723.289411] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 723.289476] env[69927]: DEBUG nova.virt.hardware [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 723.289783] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095539, 'name': CloneVM_Task} progress is 11%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.290628] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cf9b4c-2c00-44b8-8ca9-0e685c4d12cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.307201] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095540, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.309835] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671e962a-c03f-4354-825a-e2ab2879b68a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.655563] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.665584] env[69927]: DEBUG oslo_concurrency.lockutils [req-c6b68638-d5f8-47f3-a546-bc4ddb551aeb req-00ac8bac-4b52-44ed-b9fe-7eb8e83a6496 service nova] Releasing lock "refresh_cache-ee422a46-c6e4-4098-8f74-b9f0779d0fba" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.704054] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.704338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.787308] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095539, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.794101] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095540, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.880030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfd88cd-7586-472b-96d6-1ab9f68eac85 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.888493] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c000ddec-12b4-4cd0-81a8-928980902569 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.927212] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fa5a0e-41aa-4c4f-b378-39bbe121ea06 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.940231] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2ac92e-70ab-4903-a0eb-06f50ee86a75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.955865] env[69927]: DEBUG nova.compute.provider_tree [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.153950] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095536, 'name': ReconfigVM_Task, 'duration_secs': 1.180099} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.154811] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 9348e368-cc3c-4bde-91ae-26fd03ad536a/9348e368-cc3c-4bde-91ae-26fd03ad536a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 724.154811] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance '9348e368-cc3c-4bde-91ae-26fd03ad536a' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 724.287610] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095539, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.294917] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095540, 'name': CreateVM_Task, 'duration_secs': 0.583238} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.294917] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.295526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.295804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.296358] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 724.297084] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4026bfb0-f889-4922-90e3-74cad09c885b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.303441] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 724.303441] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5223cf07-67f2-3853-9e43-f1b57becfcba" [ 724.303441] env[69927]: _type = "Task" [ 724.303441] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.317205] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5223cf07-67f2-3853-9e43-f1b57becfcba, 'name': SearchDatastore_Task, 'duration_secs': 0.010976} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.317801] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.318417] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 724.318653] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.319393] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.319393] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 724.319393] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a098f08-dc75-42aa-9e3b-2de7aa738928 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.331160] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 724.331160] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Folder [datastore1] devstack-image-cache_base created. 
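The lockutils records here show access to the datastore image cache being serialized per image: a named lock on "[datastore1] devstack-image-cache_base/<image-id>" (and on the corresponding .vmdk path) is taken before checking whether the cached file already exists, so concurrent spawns of the same image wait on the lock instead of downloading it twice. A minimal sketch of that named-lock pattern with oslo.concurrency, where cache_root, image_in_cache() and fetch_image_to_cache() are illustrative assumptions rather than Nova functions:

    # Sketch of the per-image named-lock pattern from the Acquiring/Acquired/
    # Releasing lock records; cache_root, image_in_cache() and
    # fetch_image_to_cache() are assumed helpers, not Nova code.
    from oslo_concurrency import lockutils

    def ensure_cached(session, cache_root, image_id):
        # e.g. cache_root = "[datastore1] devstack-image-cache_base"
        lock_name = '%s/%s' % (cache_root, image_id)
        with lockutils.lock(lock_name):
            if not image_in_cache(session, cache_root, image_id):
                fetch_image_to_cache(session, cache_root, image_id)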
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 724.331160] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d772619-427c-4ccf-b26b-84a9bdb015cb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.337326] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 724.337326] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7aa83-4b97-50e7-bd6a-1227384a009a" [ 724.337326] env[69927]: _type = "Task" [ 724.337326] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.348807] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7aa83-4b97-50e7-bd6a-1227384a009a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.438418] env[69927]: DEBUG nova.network.neutron [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Successfully updated port: 85ea851a-ed88-452f-acd1-8e025fdeaeb0 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.460200] env[69927]: DEBUG nova.scheduler.client.report [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 724.664369] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea6b953-1682-4c43-a3c1-db1ab77d7145 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.694734] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29cb6be-59aa-47ef-9d57-7ff875ec705c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.723964] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance '9348e368-cc3c-4bde-91ae-26fd03ad536a' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 724.784387] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 
tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095539, 'name': CloneVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.853830] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7aa83-4b97-50e7-bd6a-1227384a009a, 'name': SearchDatastore_Task, 'duration_secs': 0.009989} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.855846] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53f8ead8-b464-477b-bc02-ce07922e899a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.861409] env[69927]: DEBUG nova.compute.manager [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Received event network-changed-043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.861555] env[69927]: DEBUG nova.compute.manager [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Refreshing instance network info cache due to event network-changed-043de9ef-5509-4243-acb7-a19660d6d813. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 724.861817] env[69927]: DEBUG oslo_concurrency.lockutils [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] Acquiring lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.862324] env[69927]: DEBUG oslo_concurrency.lockutils [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] Acquired lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.862556] env[69927]: DEBUG nova.network.neutron [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Refreshing network info cache for port 043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.871559] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 724.871559] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52035ad7-9f8c-d9a5-0ad6-21875f97d575" [ 724.871559] env[69927]: _type = "Task" [ 724.871559] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.883831] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52035ad7-9f8c-d9a5-0ad6-21875f97d575, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.944120] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-39421358-2d66-4fbe-a4e0-8fdb0b420c5e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.944120] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-39421358-2d66-4fbe-a4e0-8fdb0b420c5e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.944120] env[69927]: DEBUG nova.network.neutron [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.969723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.969723] env[69927]: DEBUG nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Start building networks asynchronously for instance. 
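The instance_info_cache payloads dumped in these records (for ports 32ace01f-..., 4e3d0fe6-... and 85ea851a-...) are plain JSON lists of VIF dictionaries. To pull out the fields Nova keeps logging — port id, MAC address, fixed IPs — from one of those blobs, something like the following works; cache_blob stands for one of the [...] payloads copied out of the log and is an assumption of this sketch:

    # Extract port id, MAC and fixed IPs from an instance_info_cache payload.
    # cache_blob is assumed to hold one of the network_info JSON lists above.
    import json

    network_info = json.loads(cache_blob)
    for vif in network_info:
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips']]
        print(vif['id'], vif['address'], fixed_ips)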
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.974459] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.070s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.977415] env[69927]: INFO nova.compute.claims [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.174610] env[69927]: DEBUG nova.compute.manager [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Received event network-changed-32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 725.174899] env[69927]: DEBUG nova.compute.manager [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Refreshing instance network info cache due to event network-changed-32ace01f-025d-4978-a510-c851c8daf246. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 725.176122] env[69927]: DEBUG oslo_concurrency.lockutils [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] Acquiring lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.176320] env[69927]: DEBUG oslo_concurrency.lockutils [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] Acquired lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.176501] env[69927]: DEBUG nova.network.neutron [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Refreshing network info cache for port 32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.288374] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095539, 'name': CloneVM_Task, 'duration_secs': 1.868255} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.288374] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Created linked-clone VM from snapshot [ 725.289095] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a20ce19-096a-4491-a45e-1f699068cf1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.300341] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Uploading image dca99a17-c51b-43ff-b199-20dc19f87d2a {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 725.326861] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 725.326861] env[69927]: value = "vm-811345" [ 725.326861] env[69927]: _type = "VirtualMachine" [ 725.326861] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 725.327435] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4aaf9900-e926-4919-9135-22ef6dbc72b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.336209] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease: (returnval){ [ 725.336209] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c0be5-9316-39af-7ba9-107526972197" [ 725.336209] env[69927]: _type = "HttpNfcLease" [ 725.336209] env[69927]: } obtained for exporting VM: (result){ [ 725.336209] env[69927]: value = "vm-811345" [ 725.336209] env[69927]: _type = "VirtualMachine" [ 725.336209] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 725.336209] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the lease: (returnval){ [ 725.336209] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c0be5-9316-39af-7ba9-107526972197" [ 725.336209] env[69927]: _type = "HttpNfcLease" [ 725.336209] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 725.344416] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 725.344416] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c0be5-9316-39af-7ba9-107526972197" [ 725.344416] env[69927]: _type = "HttpNfcLease" [ 725.344416] env[69927]: } is initializing. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 725.352998] env[69927]: DEBUG nova.network.neutron [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Port 67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 725.384555] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52035ad7-9f8c-d9a5-0ad6-21875f97d575, 'name': SearchDatastore_Task, 'duration_secs': 0.014636} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.386395] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.386395] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 6e698775-2556-4cbe-b65f-0cc3efa7bcf6/6e698775-2556-4cbe-b65f-0cc3efa7bcf6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.386395] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e98747f3-f776-4ff3-a49a-1ae406198c1c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.394927] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 725.394927] env[69927]: value = "task-4095542" [ 725.394927] env[69927]: _type = "Task" [ 725.394927] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.405761] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095542, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.483902] env[69927]: DEBUG nova.compute.utils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 725.486549] env[69927]: DEBUG nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 725.486764] env[69927]: DEBUG nova.network.neutron [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.542658] env[69927]: DEBUG nova.network.neutron [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.851170] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 725.851170] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c0be5-9316-39af-7ba9-107526972197" [ 725.851170] env[69927]: _type = "HttpNfcLease" [ 725.851170] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 725.851433] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 725.851433] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c0be5-9316-39af-7ba9-107526972197" [ 725.851433] env[69927]: _type = "HttpNfcLease" [ 725.851433] env[69927]: }. 
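The ExportVm records around here follow the standard HttpNfcLease flow for streaming a VM's disk out of vCenter: request a lease, wait until it is ready, then read the per-disk URLs from the lease info (the "Found VMDK URL ..." line that follows). A rough sketch, assuming a VMwareAPISession and vm_ref like those in the earlier sketch and omitting error/abort handling and the HttpNfcLeaseProgress keepalives the log also shows:

    # Sketch of the export-lease flow: ExportVm -> wait for lease -> read URLs.
    # session and vm_ref are assumed to exist; abort/error handling is omitted.
    from oslo_vmware import vim_util

    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)

    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    # HttpNfcLeaseInfo.deviceUrl has one entry per exported device; disk entries
    # carry the https URL that the "Opening URL ... for reading" record uses.
    vmdk_urls = [dev.url for dev in lease_info.deviceUrl if dev.disk]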
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 725.855435] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71402ef-ab73-496c-8743-0fc3c8b3b17e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.869727] env[69927]: DEBUG nova.policy [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86abcc2a262949b48a37cc485021ff0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65f02266005e44b1a2d8d2eddec9795b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.879454] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4a123-85ef-a17d-8bc5-9a302498be93/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 725.879575] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4a123-85ef-a17d-8bc5-9a302498be93/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 725.967819] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513472} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.968222] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 6e698775-2556-4cbe-b65f-0cc3efa7bcf6/6e698775-2556-4cbe-b65f-0cc3efa7bcf6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 725.968961] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.968961] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-212aca85-b9ae-463f-8a6f-1712420cf6d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.979978] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 725.979978] env[69927]: value = "task-4095543" [ 725.979978] env[69927]: _type = "Task" [ 725.979978] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.995995] env[69927]: DEBUG nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 726.008982] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.009839] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f7fffbf1-2a14-446e-933b-7b7919c929fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.033643] env[69927]: DEBUG nova.network.neutron [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Updating instance_info_cache with network_info: [{"id": "85ea851a-ed88-452f-acd1-8e025fdeaeb0", "address": "fa:16:3e:17:38:c3", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85ea851a-ed", "ovs_interfaceid": "85ea851a-ed88-452f-acd1-8e025fdeaeb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.222257] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.222433] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.222854] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.222854] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 
tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.223082] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.225863] env[69927]: INFO nova.compute.manager [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Terminating instance [ 726.387486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.387486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.387486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.444291] env[69927]: DEBUG nova.network.neutron [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Updated VIF entry in instance network info cache for port 32ace01f-025d-4978-a510-c851c8daf246. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 726.444651] env[69927]: DEBUG nova.network.neutron [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Updating instance_info_cache with network_info: [{"id": "32ace01f-025d-4978-a510-c851c8daf246", "address": "fa:16:3e:0a:51:0d", "network": {"id": "31bda7f6-2b49-4aee-82dd-68e864f7587c", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-956592551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd854419925b43cd8caaa303773ac093", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ace01f-02", "ovs_interfaceid": "32ace01f-025d-4978-a510-c851c8daf246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.494371] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107744} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.495100] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.496113] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf8cc7d-b4c7-4752-9375-5c452a1aeccc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.542270] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 6e698775-2556-4cbe-b65f-0cc3efa7bcf6/6e698775-2556-4cbe-b65f-0cc3efa7bcf6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.546588] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-39421358-2d66-4fbe-a4e0-8fdb0b420c5e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.547067] env[69927]: DEBUG nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Instance network_info: |[{"id": "85ea851a-ed88-452f-acd1-8e025fdeaeb0", "address": "fa:16:3e:17:38:c3", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85ea851a-ed", "ovs_interfaceid": "85ea851a-ed88-452f-acd1-8e025fdeaeb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 726.547596] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8256c2c2-4855-4f56-b6e0-0b2d10524bae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.563918] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d 
tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:38:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85ea851a-ed88-452f-acd1-8e025fdeaeb0', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 726.571480] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating folder: Project (3afde63c8cbe4aecb32a470fd6b948f6). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 726.571943] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ee1769e-78b3-49dd-9110-daca77807a6f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.583032] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 726.583032] env[69927]: value = "task-4095544" [ 726.583032] env[69927]: _type = "Task" [ 726.583032] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.584988] env[69927]: DEBUG nova.network.neutron [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Updated VIF entry in instance network info cache for port 043de9ef-5509-4243-acb7-a19660d6d813. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 726.585169] env[69927]: DEBUG nova.network.neutron [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Updating instance_info_cache with network_info: [{"id": "043de9ef-5509-4243-acb7-a19660d6d813", "address": "fa:16:3e:21:e4:ce", "network": {"id": "590d3974-8666-4163-9279-c966932d74cb", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1006416743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b875cf10c4074fe5b9e6497c5e823fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bb10726-a946-47b9-b4b5-6916e3f14cc5", "external-id": "nsx-vlan-transportzone-609", "segmentation_id": 609, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043de9ef-55", "ovs_interfaceid": "043de9ef-5509-4243-acb7-a19660d6d813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.592667] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created folder: Project (3afde63c8cbe4aecb32a470fd6b948f6) in parent group-v811283. [ 726.593064] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating folder: Instances. Parent ref: group-v811347. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 726.599922] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46e73fc5-6ade-4ac9-8fc2-186abf7d9a1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.606969] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.618956] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created folder: Instances in parent group-v811347. [ 726.619246] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 726.619464] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 726.623058] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ce6a751-3895-4ff2-b37c-ee2bfe6f9b82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.648923] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 726.648923] env[69927]: value = "task-4095547" [ 726.648923] env[69927]: _type = "Task" [ 726.648923] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.660468] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095547, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.737644] env[69927]: DEBUG nova.compute.manager [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 726.737644] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.738640] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09182fa-c2c8-42f8-b958-f8fbc82510ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.751979] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 726.755734] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35ea4a65-7fef-4df3-a9b4-a450d95f723d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.765974] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 726.765974] env[69927]: value = "task-4095548" [ 726.765974] env[69927]: _type = "Task" [ 726.765974] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.776120] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.897117] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e675a7f9-54cb-4741-89e6-adfb54c9c13e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.907528] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a622caaa-4ef6-49c9-9501-562ded3d883e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.954240] env[69927]: DEBUG oslo_concurrency.lockutils [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] Releasing lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.954596] env[69927]: DEBUG nova.compute.manager [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Received event network-vif-plugged-85ea851a-ed88-452f-acd1-8e025fdeaeb0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.954871] env[69927]: DEBUG oslo_concurrency.lockutils [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] Acquiring lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.955126] env[69927]: DEBUG oslo_concurrency.lockutils [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.955464] env[69927]: DEBUG oslo_concurrency.lockutils [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.955711] env[69927]: DEBUG nova.compute.manager [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] No waiting events found dispatching network-vif-plugged-85ea851a-ed88-452f-acd1-8e025fdeaeb0 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 726.956384] env[69927]: WARNING nova.compute.manager [req-19a407de-10bf-489c-bf04-4ad7b9336f35 req-2d0706da-5596-49de-a7b7-d63ae48c6ac6 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Received unexpected event 
network-vif-plugged-85ea851a-ed88-452f-acd1-8e025fdeaeb0 for instance with vm_state building and task_state spawning. [ 726.957783] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c90caa-12ef-4278-b628-b044382bd0b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.968470] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e00536-4b41-4075-b27a-1fdc40d7acae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.993455] env[69927]: DEBUG nova.compute.provider_tree [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.036841] env[69927]: DEBUG nova.network.neutron [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Successfully created port: fbaaf58d-7762-4361-af98-ac03d24f2a05 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.048032] env[69927]: DEBUG nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 727.091870] env[69927]: DEBUG oslo_concurrency.lockutils [req-e1da1b49-a97d-4ed6-bd16-ee2f2fbb9ddf req-a5671291-f7df-40af-9b9b-1b862a18ac99 service nova] Releasing lock "refresh_cache-5f67d6a0-e4b7-435e-8991-0f54e0379d22" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.098975] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095544, 'name': ReconfigVM_Task, 'duration_secs': 0.394658} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.099332] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 6e698775-2556-4cbe-b65f-0cc3efa7bcf6/6e698775-2556-4cbe-b65f-0cc3efa7bcf6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.099987] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-871f50cd-7fcb-469a-b7de-ec5568de17a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.109469] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 727.109469] env[69927]: value = "task-4095549" [ 727.109469] env[69927]: _type = "Task" [ 727.109469] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.121685] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095549, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.162410] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095547, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.286928] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095548, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.470812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.472470] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.472568] env[69927]: DEBUG nova.network.neutron [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.496972] env[69927]: DEBUG nova.scheduler.client.report [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 727.622652] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095549, 'name': Rename_Task, 'duration_secs': 0.194443} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.622908] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.623185] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09a2ed06-2491-4ac1-9f2a-4a8e5678a80a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.632932] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 727.632932] env[69927]: value = "task-4095550" [ 727.632932] env[69927]: _type = "Task" [ 727.632932] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.643785] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095550, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.660314] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095547, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.788379] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095548, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.005723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.030s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.005723] env[69927]: DEBUG nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 728.009311] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.928s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.009850] env[69927]: DEBUG nova.objects.instance [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lazy-loading 'resources' on Instance uuid 4bf59fae-8029-421b-95fd-a0d008891ce7 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.152578] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095550, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.164914] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095547, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.282077] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095548, 'name': PowerOffVM_Task, 'duration_secs': 1.043888} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.284410] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 728.285200] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 728.286240] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f89f7f27-c8c0-4f69-bb3b-bf4dfb56050b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.378896] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 728.378896] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 728.378896] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Deleting the datastore file [datastore1] ee422a46-c6e4-4098-8f74-b9f0779d0fba {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.378896] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c35c2891-42e7-45aa-8afe-5775154eefa1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.395030] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for the task: (returnval){ [ 728.395030] env[69927]: value = "task-4095552" [ 728.395030] env[69927]: _type = "Task" [ 728.395030] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.413625] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.428056] env[69927]: DEBUG nova.network.neutron [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance_info_cache with network_info: [{"id": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "address": "fa:16:3e:89:98:e2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c7df0a-bb", "ovs_interfaceid": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.516240] env[69927]: DEBUG nova.compute.utils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 728.522407] env[69927]: DEBUG nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 728.648932] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095550, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.663069] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095547, 'name': CreateVM_Task, 'duration_secs': 1.564693} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.663474] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 728.664348] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.664522] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.664895] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 728.665196] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-693b583b-30a7-497c-96f6-2d49cd1b4923 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.671749] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 728.671749] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529cf47c-b3f6-8491-5b36-2a3f05f96f62" [ 728.671749] env[69927]: _type = "Task" [ 728.671749] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.685708] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529cf47c-b3f6-8491-5b36-2a3f05f96f62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.912931] env[69927]: DEBUG nova.network.neutron [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Successfully updated port: fbaaf58d-7762-4361-af98-ac03d24f2a05 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 728.922582] env[69927]: DEBUG oslo_vmware.api [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Task: {'id': task-4095552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191896} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.923018] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 728.923221] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 728.923433] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 728.924242] env[69927]: INFO nova.compute.manager [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Took 2.19 seconds to destroy the instance on the hypervisor. [ 728.924501] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 728.924713] env[69927]: DEBUG nova.compute.manager [-] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 728.924885] env[69927]: DEBUG nova.network.neutron [-] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.931619] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.025109] env[69927]: DEBUG nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 729.152796] env[69927]: DEBUG oslo_vmware.api [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095550, 'name': PowerOnVM_Task, 'duration_secs': 1.177054} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.153114] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 729.153378] env[69927]: INFO nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Took 8.79 seconds to spawn the instance on the hypervisor. [ 729.153608] env[69927]: DEBUG nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 729.155487] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e661f8-83e7-4865-8e3c-53daa409b54b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.187567] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529cf47c-b3f6-8491-5b36-2a3f05f96f62, 'name': SearchDatastore_Task, 'duration_secs': 0.016347} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.187567] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.188298] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.188393] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.188595] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.189229] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.189579] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-946d0e9f-a571-4220-a96d-02e8b526ffa5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.207023] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.207371] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.208174] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04876f8d-efda-40f9-915b-61277677483c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.217703] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 729.217703] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dabe92-af8d-c4da-3a6b-38764f428f42" [ 729.217703] env[69927]: _type = "Task" [ 729.217703] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.230540] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dabe92-af8d-c4da-3a6b-38764f428f42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.279169] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452f5b9d-2e19-49d5-ae1d-475e86b173d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.289628] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a19c838-0071-4d3f-be89-e9ddc5fca62c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.328532] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790b2b59-7315-4282-8edb-e2815fbfe58e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.337341] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647dbedf-c286-413a-afb1-125853653947 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.360317] env[69927]: DEBUG nova.compute.provider_tree [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.421028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "refresh_cache-a536b069-45e0-4ffe-be53-ac33f8cb6ec0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.421262] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired lock "refresh_cache-a536b069-45e0-4ffe-be53-ac33f8cb6ec0" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.421415] env[69927]: DEBUG nova.network.neutron [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.465060] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8562a4f9-8394-4687-9dd1-a1fae3cc2502 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.487338] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f083a3-cf5d-4079-87e5-2b6ad9171360 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.496719] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance '9348e368-cc3c-4bde-91ae-26fd03ad536a' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 729.680060] env[69927]: INFO nova.compute.manager [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Took 34.09 seconds to build instance. [ 729.732625] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dabe92-af8d-c4da-3a6b-38764f428f42, 'name': SearchDatastore_Task, 'duration_secs': 0.019492} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.733563] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af7c96f2-8e4a-41a5-abd6-ff455d143695 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.740692] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 729.740692] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b5ae5-b483-d5f4-18c2-141399d5bc24" [ 729.740692] env[69927]: _type = "Task" [ 729.740692] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.752303] env[69927]: DEBUG nova.network.neutron [-] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.754027] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b5ae5-b483-d5f4-18c2-141399d5bc24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.865068] env[69927]: DEBUG nova.scheduler.client.report [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 729.949096] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 729.949096] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.949234] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 729.949463] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 729.949625] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 729.950840] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 729.950840] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 729.950840] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 729.950840] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 729.950840] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 729.951042] env[69927]: DEBUG nova.virt.hardware [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 729.956124] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d24c863-f350-4cca-950d-f78ac892b32b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.962870] env[69927]: DEBUG nova.network.neutron [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.968187] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb1044-ebf1-986a-3dab-2e797f3efefe/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 729.970038] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fb34fc-bd83-4c33-addf-bd254bedf96d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.976490] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0073cb-28e1-4164-b2cb-53106d098355 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.998491] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb1044-ebf1-986a-3dab-2e797f3efefe/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 729.998491] env[69927]: ERROR oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb1044-ebf1-986a-3dab-2e797f3efefe/disk-0.vmdk due to incomplete transfer. [ 729.998730] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d1e15cd2-45a2-46a4-b299-ed588ac8d6d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.009997] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 730.012797] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa3ce113-c29b-46da-91c9-dadbaa14d31e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.023932] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb1044-ebf1-986a-3dab-2e797f3efefe/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 730.023932] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Uploaded image d3f0871d-cf90-479c-afbc-39a5ec9421a4 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 730.026431] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 730.028350] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8e0b07fd-2376-4177-95f3-40612d95eaf6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.030967] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 730.030967] env[69927]: value = "task-4095553" [ 730.030967] env[69927]: _type = "Task" [ 730.030967] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.038987] env[69927]: DEBUG nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 730.042053] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 730.042053] env[69927]: value = "task-4095554" [ 730.042053] env[69927]: _type = "Task" [ 730.042053] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.057102] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095553, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.070376] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095554, 'name': Destroy_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.074139] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 730.074760] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.074854] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 730.075395] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.075483] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 730.075716] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 730.076053] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 730.076364] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 730.080022] env[69927]: DEBUG nova.virt.hardware [None 
req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 730.080022] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 730.080022] env[69927]: DEBUG nova.virt.hardware [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 730.080022] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c4215e-62d5-40d7-89e1-314577c5f559 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.087815] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82ac89c-4f91-4ed6-9c94-49a881db4c00 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.105332] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.112539] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Creating folder: Project (7ebdcd34fe5f45d58168c97179633dfc). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.115958] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-734b2b2d-aa6b-4907-9f3c-f569fd15ef97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.131448] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Created folder: Project (7ebdcd34fe5f45d58168c97179633dfc) in parent group-v811283. [ 730.131448] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Creating folder: Instances. Parent ref: group-v811350. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.131448] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66d13f86-3b36-4e4c-ba9d-27254bb02aa3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.144641] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Created folder: Instances in parent group-v811350. [ 730.144914] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.145142] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.145357] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27e9e8c6-0e53-4ae6-8904-38847e37d36d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.163546] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.163546] env[69927]: value = "task-4095557" [ 730.163546] env[69927]: _type = "Task" [ 730.163546] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.175690] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095557, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.182398] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7fdd2f1a-9dca-4e70-9955-1bf0f49c364b tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.926s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.213793] env[69927]: DEBUG nova.network.neutron [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Updating instance_info_cache with network_info: [{"id": "fbaaf58d-7762-4361-af98-ac03d24f2a05", "address": "fa:16:3e:a8:e3:d1", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbaaf58d-77", "ovs_interfaceid": "fbaaf58d-7762-4361-af98-ac03d24f2a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.261883] env[69927]: INFO nova.compute.manager [-] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Took 1.34 seconds to deallocate network for instance. [ 730.263260] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b5ae5-b483-d5f4-18c2-141399d5bc24, 'name': SearchDatastore_Task, 'duration_secs': 0.013983} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.269278] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.272767] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 39421358-2d66-4fbe-a4e0-8fdb0b420c5e/39421358-2d66-4fbe-a4e0-8fdb0b420c5e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 730.272767] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48818214-7f09-4862-bf82-26cee9069071 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.280042] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 730.280042] env[69927]: value = "task-4095558" [ 730.280042] env[69927]: _type = "Task" [ 730.280042] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.288683] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095558, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.376628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.366s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.378778] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.849s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.381681] env[69927]: INFO nova.compute.claims [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.415326] env[69927]: INFO nova.scheduler.client.report [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleted allocations for instance 4bf59fae-8029-421b-95fd-a0d008891ce7 [ 730.497073] env[69927]: DEBUG nova.compute.manager [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Received event network-changed-85ea851a-ed88-452f-acd1-8e025fdeaeb0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.497277] env[69927]: DEBUG nova.compute.manager [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Refreshing instance network info cache due to event network-changed-85ea851a-ed88-452f-acd1-8e025fdeaeb0. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 730.499524] env[69927]: DEBUG oslo_concurrency.lockutils [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] Acquiring lock "refresh_cache-39421358-2d66-4fbe-a4e0-8fdb0b420c5e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.499721] env[69927]: DEBUG oslo_concurrency.lockutils [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] Acquired lock "refresh_cache-39421358-2d66-4fbe-a4e0-8fdb0b420c5e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.499895] env[69927]: DEBUG nova.network.neutron [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Refreshing network info cache for port 85ea851a-ed88-452f-acd1-8e025fdeaeb0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 730.546738] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095553, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.559666] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095554, 'name': Destroy_Task, 'duration_secs': 0.434839} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.563396] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Destroyed the VM [ 730.563396] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 730.563396] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b91c3580-9920-48f1-b144-8d2f27978a48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.569578] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 730.569578] env[69927]: value = "task-4095559" [ 730.569578] env[69927]: _type = "Task" [ 730.569578] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.581571] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095559, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.681763] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095557, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.688391] env[69927]: DEBUG nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 730.716553] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Releasing lock "refresh_cache-a536b069-45e0-4ffe-be53-ac33f8cb6ec0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.717346] env[69927]: DEBUG nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Instance network_info: |[{"id": "fbaaf58d-7762-4361-af98-ac03d24f2a05", "address": "fa:16:3e:a8:e3:d1", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbaaf58d-77", "ovs_interfaceid": "fbaaf58d-7762-4361-af98-ac03d24f2a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 730.717938] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:e3:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbaaf58d-7762-4361-af98-ac03d24f2a05', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.728175] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Creating folder: Project (65f02266005e44b1a2d8d2eddec9795b). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.729270] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc4b243b-c90a-4a30-90e6-8e106d90e47f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.748074] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Created folder: Project (65f02266005e44b1a2d8d2eddec9795b) in parent group-v811283. [ 730.748074] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Creating folder: Instances. Parent ref: group-v811353. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.748074] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1891e5c5-8eb5-4dfb-9090-50fcaf3a3912 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.763502] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Created folder: Instances in parent group-v811353. [ 730.763502] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.763820] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.765102] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0965bfc-0f13-4782-be18-7ec90213f343 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.781809] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.797278] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095558, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.797656] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.797656] env[69927]: value = "task-4095562" [ 730.797656] env[69927]: _type = "Task" [ 730.797656] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.813028] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095562, 'name': CreateVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.924132] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e06695da-9e50-47e0-a991-92cd4d97661e tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "4bf59fae-8029-421b-95fd-a0d008891ce7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.943s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.046564] env[69927]: DEBUG oslo_vmware.api [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095553, 'name': PowerOnVM_Task, 'duration_secs': 0.729891} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.046564] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.046829] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd03117-e118-4419-909b-23805640e77c tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance '9348e368-cc3c-4bde-91ae-26fd03ad536a' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 731.082620] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095559, 'name': RemoveSnapshot_Task} progress is 58%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.175560] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095557, 'name': CreateVM_Task, 'duration_secs': 0.833742} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.175811] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 731.176364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.176656] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.176992] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 731.177348] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd15059d-063c-421e-a2f9-a301f077c043 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.183882] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 731.183882] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523b168e-4f34-9aaf-307d-a52322297c83" [ 731.183882] env[69927]: _type = "Task" [ 731.183882] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.199281] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523b168e-4f34-9aaf-307d-a52322297c83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.221873] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.276522] env[69927]: DEBUG nova.network.neutron [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Updated VIF entry in instance network info cache for port 85ea851a-ed88-452f-acd1-8e025fdeaeb0. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 731.276896] env[69927]: DEBUG nova.network.neutron [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Updating instance_info_cache with network_info: [{"id": "85ea851a-ed88-452f-acd1-8e025fdeaeb0", "address": "fa:16:3e:17:38:c3", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85ea851a-ed", "ovs_interfaceid": "85ea851a-ed88-452f-acd1-8e025fdeaeb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.299572] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80151} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.303284] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 39421358-2d66-4fbe-a4e0-8fdb0b420c5e/39421358-2d66-4fbe-a4e0-8fdb0b420c5e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 731.303555] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.304308] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f93a633-ff4c-4d28-bc4d-00ebb14f8097 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.314384] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095562, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.316134] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 731.316134] env[69927]: value = "task-4095563" [ 731.316134] env[69927]: _type = "Task" [ 731.316134] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.325668] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095563, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.584581] env[69927]: DEBUG oslo_vmware.api [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095559, 'name': RemoveSnapshot_Task, 'duration_secs': 0.791395} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.584581] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 731.584799] env[69927]: INFO nova.compute.manager [None req-b5178a9e-e908-4acc-97f6-c2a34ac33da8 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Took 17.22 seconds to snapshot the instance on the hypervisor. [ 731.699366] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523b168e-4f34-9aaf-307d-a52322297c83, 'name': SearchDatastore_Task, 'duration_secs': 0.01504} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.699714] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.699982] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.700290] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.700446] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.700653] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.700977] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-347c9c97-2cbc-4b13-abac-c7e824f1a334 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.711761] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.712068] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 731.712927] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da24471e-3a52-4f52-8d73-016d527c8771 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.719869] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 731.719869] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f6190c-0b3d-5f4b-0abe-ec1447ef157a" [ 731.719869] env[69927]: _type = "Task" [ 731.719869] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.733666] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f6190c-0b3d-5f4b-0abe-ec1447ef157a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.780228] env[69927]: DEBUG oslo_concurrency.lockutils [req-e6d3f1fb-006e-446b-ae1e-33d1b159b416 req-61194924-2320-48ec-afea-f7468a42ae86 service nova] Releasing lock "refresh_cache-39421358-2d66-4fbe-a4e0-8fdb0b420c5e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.813095] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095562, 'name': CreateVM_Task, 'duration_secs': 0.719179} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.816834] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 731.818058] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.819047] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.819576] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 731.823640] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d39a59-5d9c-4eac-afdb-4b86aee2f440 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.834684] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095563, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081318} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.835358] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 731.835358] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52faf9e4-7d4f-29b4-82c6-1b2e0bdb76c2" [ 731.835358] env[69927]: _type = "Task" [ 731.835358] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.842388] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.842388] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e1656b-32b5-4644-ba0b-7f70ee38b4d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.853547] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52faf9e4-7d4f-29b4-82c6-1b2e0bdb76c2, 'name': SearchDatastore_Task, 'duration_secs': 0.01695} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.868242] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.868725] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.869095] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.880150] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 39421358-2d66-4fbe-a4e0-8fdb0b420c5e/39421358-2d66-4fbe-a4e0-8fdb0b420c5e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.881103] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7234961f-cb61-407f-b4ea-620fce7524a4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.908583] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 731.908583] env[69927]: value = "task-4095564" [ 731.908583] 
env[69927]: _type = "Task" [ 731.908583] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.920775] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095564, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.056506] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4cbd5b-64a1-47ed-a7ad-cbb35f264aae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.068879] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fe3d5f-881c-4bec-bc90-4a35a708ad03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.107704] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15c0cec-49a6-40cc-81ac-8c08bc80a37d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.114134] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec661962-0685-4f67-b5dd-75ee24f45a22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.130322] env[69927]: DEBUG nova.compute.provider_tree [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.238924] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f6190c-0b3d-5f4b-0abe-ec1447ef157a, 'name': SearchDatastore_Task, 'duration_secs': 0.018254} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.240110] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e161afd0-7159-431d-b7a6-6e93ec6b0b2f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.247168] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 732.247168] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5225fbb7-7af2-f8fc-bc2d-20996278104e" [ 732.247168] env[69927]: _type = "Task" [ 732.247168] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.259133] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5225fbb7-7af2-f8fc-bc2d-20996278104e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.421642] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095564, 'name': ReconfigVM_Task, 'duration_secs': 0.318815} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.422476] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 39421358-2d66-4fbe-a4e0-8fdb0b420c5e/39421358-2d66-4fbe-a4e0-8fdb0b420c5e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.425022] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b3342dd-4307-4cf6-b2ae-5cd9b1a54464 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.435756] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 732.435756] env[69927]: value = "task-4095565" [ 732.435756] env[69927]: _type = "Task" [ 732.435756] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.450029] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095565, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.634110] env[69927]: DEBUG nova.scheduler.client.report [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 732.759112] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5225fbb7-7af2-f8fc-bc2d-20996278104e, 'name': SearchDatastore_Task, 'duration_secs': 0.019031} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.759491] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.759741] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e8e80be6-a82f-4cc5-92fd-366badf519b8/e8e80be6-a82f-4cc5-92fd-366badf519b8.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 732.760060] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.760260] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.760501] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-faa1432f-2c0f-46ba-a8ea-d7fb54cea1c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.763546] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62af68b3-b486-47ca-8abc-7bb087bd3b5e {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.773232] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 732.773232] env[69927]: value = "task-4095566" [ 732.773232] env[69927]: _type = "Task" [ 732.773232] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.774700] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.775318] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 732.779617] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9370dbe7-0543-4924-ad2c-e91a4f63780b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.790954] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.793073] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 732.793073] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52075cca-8c74-ae63-06c9-4bda28732381" [ 732.793073] env[69927]: _type = "Task" [ 732.793073] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.803184] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52075cca-8c74-ae63-06c9-4bda28732381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.951262] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095565, 'name': Rename_Task, 'duration_secs': 0.162886} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.952136] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 732.952466] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87c977dd-cc81-4a3a-85ba-7bc67d154f4d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.963546] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 732.963546] env[69927]: value = "task-4095567" [ 732.963546] env[69927]: _type = "Task" [ 732.963546] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.974859] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095567, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.142937] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.143393] env[69927]: DEBUG nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 733.146595] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.255s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.147022] env[69927]: DEBUG nova.objects.instance [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lazy-loading 'resources' on Instance uuid c45d2259-2a05-49d5-81eb-4c79ced83121 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 733.214789] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.215074] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.242970] env[69927]: DEBUG nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Received event network-vif-plugged-fbaaf58d-7762-4361-af98-ac03d24f2a05 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 733.244011] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Acquiring lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.244685] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.244685] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.244764] env[69927]: DEBUG nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: 
a536b069-45e0-4ffe-be53-ac33f8cb6ec0] No waiting events found dispatching network-vif-plugged-fbaaf58d-7762-4361-af98-ac03d24f2a05 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 733.244996] env[69927]: WARNING nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Received unexpected event network-vif-plugged-fbaaf58d-7762-4361-af98-ac03d24f2a05 for instance with vm_state building and task_state spawning. [ 733.245321] env[69927]: DEBUG nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Received event network-changed-fbaaf58d-7762-4361-af98-ac03d24f2a05 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 733.245490] env[69927]: DEBUG nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Refreshing instance network info cache due to event network-changed-fbaaf58d-7762-4361-af98-ac03d24f2a05. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 733.245690] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Acquiring lock "refresh_cache-a536b069-45e0-4ffe-be53-ac33f8cb6ec0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.245832] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Acquired lock "refresh_cache-a536b069-45e0-4ffe-be53-ac33f8cb6ec0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.245978] env[69927]: DEBUG nova.network.neutron [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Refreshing network info cache for port fbaaf58d-7762-4361-af98-ac03d24f2a05 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 733.284783] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095566, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.304809] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52075cca-8c74-ae63-06c9-4bda28732381, 'name': SearchDatastore_Task, 'duration_secs': 0.014488} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.305819] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b45760da-b031-46bd-9b29-375694e2313c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.313183] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 733.313183] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5222294a-36fb-d61e-e09c-685ba4e62c9d" [ 733.313183] env[69927]: _type = "Task" [ 733.313183] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.327366] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5222294a-36fb-d61e-e09c-685ba4e62c9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.479459] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095567, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.650589] env[69927]: DEBUG nova.compute.utils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 733.656471] env[69927]: DEBUG nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 733.656471] env[69927]: DEBUG nova.network.neutron [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 733.709653] env[69927]: DEBUG nova.policy [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44343ce64a174585ac19f26149f9a480', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e5c81a6491a4090b807b7328df7d8ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 733.793031] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095566, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623118} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.793031] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e8e80be6-a82f-4cc5-92fd-366badf519b8/e8e80be6-a82f-4cc5-92fd-366badf519b8.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 733.793031] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.793031] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e30dae93-7d32-4ee8-a7fe-c700b1b51d73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.807081] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 733.807081] env[69927]: value = "task-4095568" [ 733.807081] env[69927]: _type = "Task" [ 733.807081] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.822900] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095568, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.831104] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5222294a-36fb-d61e-e09c-685ba4e62c9d, 'name': SearchDatastore_Task, 'duration_secs': 0.058526} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.831104] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.831104] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] a536b069-45e0-4ffe-be53-ac33f8cb6ec0/a536b069-45e0-4ffe-be53-ac33f8cb6ec0.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 733.831104] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56b8a338-51d7-4be3-b19b-0651be64b05f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.842888] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 733.842888] env[69927]: value = "task-4095569" [ 733.842888] env[69927]: _type = "Task" [ 733.842888] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.859600] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.900244] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4a123-85ef-a17d-8bc5-9a302498be93/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 733.901648] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f8ca52-fda8-40c7-8914-c3ebd3e83432 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.911850] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4a123-85ef-a17d-8bc5-9a302498be93/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 733.911850] env[69927]: ERROR oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4a123-85ef-a17d-8bc5-9a302498be93/disk-0.vmdk due to incomplete transfer. [ 733.912108] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-004d5a7c-569e-4d5d-8395-5db6f0332256 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.931372] env[69927]: DEBUG oslo_vmware.rw_handles [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4a123-85ef-a17d-8bc5-9a302498be93/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 733.931680] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Uploaded image dca99a17-c51b-43ff-b199-20dc19f87d2a to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 733.933379] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 733.934278] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-75d362ce-db21-4eea-8824-cfa86584cd3b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.942829] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 733.942829] env[69927]: value = "task-4095570" [ 733.942829] env[69927]: _type = "Task" [ 733.942829] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.963640] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095570, 'name': Destroy_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.979663] env[69927]: DEBUG oslo_vmware.api [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095567, 'name': PowerOnVM_Task, 'duration_secs': 0.689474} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.986674] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 733.987065] env[69927]: INFO nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Took 10.77 seconds to spawn the instance on the hypervisor. [ 733.987349] env[69927]: DEBUG nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 733.988798] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2ef667-8b87-4f2e-b12b-f702508fa083 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.050125] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "e0bca101-cf8d-48e1-a331-b0018548593e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.050399] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e0bca101-cf8d-48e1-a331-b0018548593e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.121823] env[69927]: DEBUG nova.network.neutron [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Updated VIF entry in instance network info cache for port fbaaf58d-7762-4361-af98-ac03d24f2a05. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 734.122129] env[69927]: DEBUG nova.network.neutron [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Updating instance_info_cache with network_info: [{"id": "fbaaf58d-7762-4361-af98-ac03d24f2a05", "address": "fa:16:3e:a8:e3:d1", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbaaf58d-77", "ovs_interfaceid": "fbaaf58d-7762-4361-af98-ac03d24f2a05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.157774] env[69927]: DEBUG nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 734.193767] env[69927]: DEBUG nova.network.neutron [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Successfully created port: f02a1745-61d5-4414-88fe-680d9e7bba72 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.318498] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.188427} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.321566] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 734.322711] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b5e161-8a0e-4a80-a118-b77be8b9811e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.345024] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] e8e80be6-a82f-4cc5-92fd-366badf519b8/e8e80be6-a82f-4cc5-92fd-366badf519b8.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 734.349426] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c934720d-c8a1-40d5-9d41-c8c7c38574b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.378044] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.378044] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.388360] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095569, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.392612] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 734.392612] env[69927]: value = "task-4095571" [ 734.392612] env[69927]: _type = "Task" [ 734.392612] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.405366] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095571, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.409894] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3182c74b-3de6-4493-9abc-fb7315167326 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.419457] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69b80ab-6189-4c35-82b5-7a41f0881795 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.465897] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.466327] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.466635] env[69927]: DEBUG nova.compute.manager [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Going to confirm migration 1 {{(pid=69927) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 734.475408] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b18d33-a61e-47ed-956f-0316f1796800 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.494200] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916a3d8d-1eaa-44f1-88f6-15c397c781bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.500880] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095570, 'name': Destroy_Task, 'duration_secs': 0.393475} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.501310] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Destroyed the VM [ 734.501639] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 734.502537] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6a1f222d-eaeb-44c4-85e2-0154d8ed86b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.523687] env[69927]: DEBUG nova.compute.provider_tree [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.530723] env[69927]: INFO nova.compute.manager [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Took 38.66 seconds to build instance. [ 734.534136] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 734.534136] env[69927]: value = "task-4095572" [ 734.534136] env[69927]: _type = "Task" [ 734.534136] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.551213] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095572, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.626856] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Releasing lock "refresh_cache-a536b069-45e0-4ffe-be53-ac33f8cb6ec0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.626856] env[69927]: DEBUG nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Received event network-vif-deleted-4e3d0fe6-1171-4bdd-bfab-86bb4f7af637 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.626856] env[69927]: DEBUG nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Received event network-changed-32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.627179] env[69927]: DEBUG nova.compute.manager [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Refreshing instance network info cache due to event network-changed-32ace01f-025d-4978-a510-c851c8daf246. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 734.627434] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Acquiring lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.627627] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Acquired lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.627797] env[69927]: DEBUG nova.network.neutron [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Refreshing network info cache for port 32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.861659] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.972785} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.861659] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] a536b069-45e0-4ffe-be53-ac33f8cb6ec0/a536b069-45e0-4ffe-be53-ac33f8cb6ec0.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 734.861659] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 734.862400] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3226fe87-af5f-4e0b-ae5e-9603bfee82b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.869781] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 734.869781] env[69927]: value = "task-4095573" [ 734.869781] env[69927]: _type = "Task" [ 734.869781] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.886309] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095573, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.904382] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095571, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.033479] env[69927]: DEBUG nova.scheduler.client.report [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 735.037606] env[69927]: DEBUG oslo_concurrency.lockutils [None req-22ecb8ac-2b09-4742-9bd8-d82dfe4f6c3d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.003s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.050811] env[69927]: DEBUG oslo_vmware.api [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095572, 'name': RemoveSnapshot_Task, 'duration_secs': 0.450803} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.052355] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 735.052355] env[69927]: INFO nova.compute.manager [None req-615d93c1-3b29-42a1-9d3d-c85134d62056 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Took 13.92 seconds to snapshot the instance on the hypervisor. 
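The entries above repeat one pattern: nova invokes a long-running vCenter method (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, RemoveSnapshot_Task), receives a Task managed object, and oslo.vmware's wait_for_task/_poll_task loop then produces the "progress is N%" and "completed successfully" lines seen here. The sketch below is a minimal illustration of that invoke-then-wait pattern using oslo.vmware directly; it is not nova's own code. It assumes an already established oslo_vmware.api.VMwareAPISession named `session` (the log shows _create_session building one at startup) and a VirtualMachine managed-object reference `vm_ref` obtained elsewhere; the invoke_api/wait_for_task names match the oslo_vmware paths logged above, but exact signatures should be checked against the installed oslo.vmware release.

def power_on(session, vm_ref):
    # Start the asynchronous vCenter operation; this corresponds to the
    # "Invoking VirtualMachine.PowerOnVM_Task ..." request_handler lines.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task's server-side state (the repeated
    # "Task: {...} progress is N%" lines) and returns the task info once
    # the task reports success; a failed task raises an oslo.vmware error.
    return session.wait_for_task(task_ref)

The other *_Task invocations in this section follow the same shape, differing only in the target managed object and arguments (for example VirtualDiskManager.CopyVirtualDisk_Task against the datastore paths shown above).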
[ 735.129917] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.132064] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.132064] env[69927]: DEBUG nova.network.neutron [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.132064] env[69927]: DEBUG nova.objects.instance [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lazy-loading 'info_cache' on Instance uuid 9348e368-cc3c-4bde-91ae-26fd03ad536a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.170412] env[69927]: DEBUG nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 735.196356] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 735.196637] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.198312] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.198771] env[69927]: DEBUG 
nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.198771] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.198924] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 735.199171] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 735.199331] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 735.199502] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 735.199675] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 735.199866] env[69927]: DEBUG nova.virt.hardware [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 735.200814] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b750655-2aa8-4e27-805a-b247cc9189aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.211893] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cd0c93-1240-40c0-870f-fc945177568f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.381728] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082218} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.382111] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.382931] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7968e072-a994-409f-bf60-f12ece25d8bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.413431] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] a536b069-45e0-4ffe-be53-ac33f8cb6ec0/a536b069-45e0-4ffe-be53-ac33f8cb6ec0.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.415181] env[69927]: DEBUG nova.network.neutron [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Updated VIF entry in instance network info cache for port 32ace01f-025d-4978-a510-c851c8daf246. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 735.415567] env[69927]: DEBUG nova.network.neutron [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Updating instance_info_cache with network_info: [{"id": "32ace01f-025d-4978-a510-c851c8daf246", "address": "fa:16:3e:0a:51:0d", "network": {"id": "31bda7f6-2b49-4aee-82dd-68e864f7587c", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-956592551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd854419925b43cd8caaa303773ac093", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ace01f-02", "ovs_interfaceid": "32ace01f-025d-4978-a510-c851c8daf246", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.422765] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d870221-34dc-4609-afab-1b2ca29ce651 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.438165] env[69927]: DEBUG 
oslo_concurrency.lockutils [req-4ef5c5c6-abce-43b1-b95c-961f7ae77263 req-a942deeb-8052-46f0-b2b1-d7c1a2a00c2f service nova] Releasing lock "refresh_cache-6e698775-2556-4cbe-b65f-0cc3efa7bcf6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.447718] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095571, 'name': ReconfigVM_Task, 'duration_secs': 0.723781} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.449111] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Reconfigured VM instance instance-00000016 to attach disk [datastore1] e8e80be6-a82f-4cc5-92fd-366badf519b8/e8e80be6-a82f-4cc5-92fd-366badf519b8.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 735.449815] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 735.449815] env[69927]: value = "task-4095574" [ 735.449815] env[69927]: _type = "Task" [ 735.449815] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.450022] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd3687ae-f861-454b-a940-41491f4fdc49 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.461616] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095574, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.463199] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 735.463199] env[69927]: value = "task-4095575" [ 735.463199] env[69927]: _type = "Task" [ 735.463199] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.473911] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095575, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.530526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.530526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.530526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.530526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.530809] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.532941] env[69927]: INFO nova.compute.manager [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Terminating instance [ 735.540288] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.394s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.543129] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.897s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.546384] env[69927]: INFO 
nova.compute.claims [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.549517] env[69927]: DEBUG nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 735.572207] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "2cdfda66-1d93-4960-a129-2788f10fa593" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.572344] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "2cdfda66-1d93-4960-a129-2788f10fa593" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.572582] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "2cdfda66-1d93-4960-a129-2788f10fa593-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.572770] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "2cdfda66-1d93-4960-a129-2788f10fa593-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.572960] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "2cdfda66-1d93-4960-a129-2788f10fa593-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.576788] env[69927]: INFO nova.scheduler.client.report [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Deleted allocations for instance c45d2259-2a05-49d5-81eb-4c79ced83121 [ 735.578325] env[69927]: INFO nova.compute.manager [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Terminating instance [ 735.794321] env[69927]: DEBUG nova.network.neutron [None 
req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Successfully updated port: f02a1745-61d5-4414-88fe-680d9e7bba72 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 735.936180] env[69927]: DEBUG nova.compute.manager [req-86b00cdc-b7d4-4d32-831d-435d5eacd765 req-d2daaf02-8a79-4c36-929f-00e973a6877b service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Received event network-vif-plugged-f02a1745-61d5-4414-88fe-680d9e7bba72 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.936507] env[69927]: DEBUG oslo_concurrency.lockutils [req-86b00cdc-b7d4-4d32-831d-435d5eacd765 req-d2daaf02-8a79-4c36-929f-00e973a6877b service nova] Acquiring lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.936597] env[69927]: DEBUG oslo_concurrency.lockutils [req-86b00cdc-b7d4-4d32-831d-435d5eacd765 req-d2daaf02-8a79-4c36-929f-00e973a6877b service nova] Lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.936809] env[69927]: DEBUG oslo_concurrency.lockutils [req-86b00cdc-b7d4-4d32-831d-435d5eacd765 req-d2daaf02-8a79-4c36-929f-00e973a6877b service nova] Lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.937107] env[69927]: DEBUG nova.compute.manager [req-86b00cdc-b7d4-4d32-831d-435d5eacd765 req-d2daaf02-8a79-4c36-929f-00e973a6877b service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] No waiting events found dispatching network-vif-plugged-f02a1745-61d5-4414-88fe-680d9e7bba72 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 735.937493] env[69927]: WARNING nova.compute.manager [req-86b00cdc-b7d4-4d32-831d-435d5eacd765 req-d2daaf02-8a79-4c36-929f-00e973a6877b service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Received unexpected event network-vif-plugged-f02a1745-61d5-4414-88fe-680d9e7bba72 for instance with vm_state building and task_state spawning. [ 735.962641] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.974223] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095575, 'name': Rename_Task, 'duration_secs': 0.233541} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.974658] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 735.974828] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2745d395-2a73-4688-9066-8456427db0ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.985013] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 735.985013] env[69927]: value = "task-4095576" [ 735.985013] env[69927]: _type = "Task" [ 735.985013] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.994316] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.037876] env[69927]: DEBUG nova.compute.manager [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 736.037876] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 736.038585] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab07f30-1ec7-4dac-b8cc-900371eb2867 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.050192] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 736.050192] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f40c3247-bd1f-4ef1-911b-07a2fc2fb408 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.062055] env[69927]: DEBUG oslo_vmware.api [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 736.062055] env[69927]: value = "task-4095577" [ 736.062055] env[69927]: _type = "Task" [ 736.062055] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.077780] env[69927]: DEBUG oslo_vmware.api [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.084507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.092926] env[69927]: DEBUG nova.compute.manager [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 736.094494] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 736.094494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c384349a-b5c2-4e7d-a35f-b9d131d83588 tempest-ServerShowV254Test-369491814 tempest-ServerShowV254Test-369491814-project-member] Lock "c45d2259-2a05-49d5-81eb-4c79ced83121" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.099s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.095778] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd696546-012d-4df5-832a-6b00e4b909de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.106599] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 736.109144] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-890cb8ef-d22b-4cf9-a242-d746768e66c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.117390] env[69927]: DEBUG oslo_vmware.api [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 736.117390] env[69927]: value = "task-4095578" [ 736.117390] env[69927]: _type = "Task" [ 736.117390] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.133901] env[69927]: DEBUG oslo_vmware.api [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.299081] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.299081] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.299081] env[69927]: DEBUG nova.network.neutron [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.396513] env[69927]: DEBUG nova.network.neutron [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance_info_cache with network_info: [{"id": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "address": "fa:16:3e:89:98:e2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.164", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67c7df0a-bb", "ovs_interfaceid": "67c7df0a-bb67-40ee-9a2d-11cea9dbacb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.462675] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095574, 'name': ReconfigVM_Task, 'duration_secs': 0.711203} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.463454] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Reconfigured VM instance instance-00000015 to attach disk [datastore1] a536b069-45e0-4ffe-be53-ac33f8cb6ec0/a536b069-45e0-4ffe-be53-ac33f8cb6ec0.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.464205] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8620b64-542a-4448-be11-676f357d87fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.474577] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 736.474577] env[69927]: value = "task-4095579" [ 736.474577] env[69927]: _type = "Task" [ 736.474577] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.486570] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095579, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.496839] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095576, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.524593] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "128d0705-21a0-4103-ae84-85bbac7e718b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.525096] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.525376] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "128d0705-21a0-4103-ae84-85bbac7e718b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.525665] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.525759] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.528307] env[69927]: INFO nova.compute.manager [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Terminating instance [ 736.576382] env[69927]: DEBUG oslo_vmware.api [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095577, 'name': PowerOffVM_Task, 'duration_secs': 0.247416} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.577478] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 736.577478] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 736.577478] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d4317f3-1510-497d-bf3e-95e27e4aea4d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.629842] env[69927]: DEBUG oslo_vmware.api [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095578, 'name': PowerOffVM_Task, 'duration_secs': 0.226793} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.633852] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 736.635869] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 736.635869] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2da1b732-c2c2-4635-adb1-01a346fe8c2a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.654187] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 736.654630] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 736.658022] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleting the datastore file [datastore1] 39421358-2d66-4fbe-a4e0-8fdb0b420c5e {{(pid=69927) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 736.658022] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35f76d91-f77c-45f3-8a84-7cb6eabae41b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.666792] env[69927]: DEBUG oslo_vmware.api [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 736.666792] env[69927]: value = "task-4095582" [ 736.666792] env[69927]: _type = "Task" [ 736.666792] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.681398] env[69927]: DEBUG oslo_vmware.api [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.720920] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 736.720920] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 736.720920] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Deleting the datastore file [datastore2] 2cdfda66-1d93-4960-a129-2788f10fa593 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 736.720920] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-241e3f33-3b41-4836-b7a9-a6df647e22f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.730347] env[69927]: DEBUG oslo_vmware.api [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for the task: (returnval){ [ 736.730347] env[69927]: value = "task-4095583" [ 736.730347] env[69927]: _type = "Task" [ 736.730347] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.743568] env[69927]: DEBUG oslo_vmware.api [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.842694] env[69927]: DEBUG nova.network.neutron [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.900173] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-9348e368-cc3c-4bde-91ae-26fd03ad536a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.900548] env[69927]: DEBUG nova.objects.instance [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lazy-loading 'migration_context' on Instance uuid 9348e368-cc3c-4bde-91ae-26fd03ad536a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 736.987627] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095579, 'name': Rename_Task, 'duration_secs': 0.241744} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.991884] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 736.995516] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-038e5453-60e8-472e-a44f-e1d478aa25c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.007065] env[69927]: DEBUG oslo_vmware.api [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095576, 'name': PowerOnVM_Task, 'duration_secs': 0.786738} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.007486] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 737.007486] env[69927]: value = "task-4095584" [ 737.007486] env[69927]: _type = "Task" [ 737.007486] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.007759] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 737.008014] env[69927]: INFO nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Took 6.97 seconds to spawn the instance on the hypervisor. [ 737.008236] env[69927]: DEBUG nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 737.009176] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4eb5dea-495b-4459-8457-e4112ea9b6df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.016798] env[69927]: DEBUG nova.network.neutron [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Updating instance_info_cache with network_info: [{"id": "f02a1745-61d5-4414-88fe-680d9e7bba72", "address": "fa:16:3e:cb:2a:4d", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf02a1745-61", "ovs_interfaceid": "f02a1745-61d5-4414-88fe-680d9e7bba72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.032925] env[69927]: DEBUG nova.compute.manager [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 737.033201] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 737.037166] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49dd09ee-e3c2-4f2e-b554-808f794c4fb4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.045946] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 737.046348] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2787e9c4-244d-4dd3-88f1-505b804e21b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.131394] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 737.131651] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 737.131911] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleting the datastore file [datastore2] 128d0705-21a0-4103-ae84-85bbac7e718b {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 737.132254] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c36dd88-97a3-4314-9cf9-d1e00db5342d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.143030] env[69927]: DEBUG oslo_vmware.api [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 737.143030] env[69927]: value = "task-4095586" [ 737.143030] env[69927]: _type = "Task" [ 737.143030] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.155422] env[69927]: DEBUG oslo_vmware.api [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095586, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.181490] env[69927]: DEBUG oslo_vmware.api [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324404} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.181797] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 737.182018] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 737.182246] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 737.182450] env[69927]: INFO nova.compute.manager [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 737.182753] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.182971] env[69927]: DEBUG nova.compute.manager [-] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 737.184042] env[69927]: DEBUG nova.network.neutron [-] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.229848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "80fc9add-683b-424e-9876-cdcae664e2da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.230120] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "80fc9add-683b-424e-9876-cdcae664e2da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.242239] env[69927]: DEBUG oslo_vmware.api [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Task: {'id': task-4095583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.356978} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.242399] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 737.243936] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 737.243936] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 737.243936] env[69927]: INFO nova.compute.manager [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Took 1.15 seconds to destroy the instance on the hypervisor. 
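The lines from roughly 736.05 through 737.24 trace the same teardown sequence twice, for instances 39421358-2d66-4fbe-a4e0-8fdb0b420c5e and 2cdfda66-1d93-4960-a129-2788f10fa593: terminate_instance takes the per-instance lock, vmops powers the VM off via PowerOffVM_Task, unregisters it with UnregisterVM, deletes its datastore directory with DeleteDatastoreFile_Task, and only then does the compute manager report "Instance destroyed" and start deallocating the Neutron ports. The sketch below condenses that ordering; FakeVMwareSession and the helper names are stand-ins used only to make the flow concrete, not Nova's actual implementation (which, per the source references in the log, lives in nova/virt/vmwareapi/vmops.py and vm_util.py).

import logging

LOG = logging.getLogger("teardown-sketch")


class FakeVMwareSession:
    """Minimal stand-in for a vCenter session (illustration only)."""

    def invoke(self, method, **kwargs):
        # In the real trace, PowerOffVM_Task and DeleteDatastoreFile_Task are
        # vCenter tasks that get polled to completion, while UnregisterVM
        # returns synchronously; here every call is simply logged.
        LOG.debug("Invoking %s %s", method, kwargs)
        return {"state": "success", "name": method}


def destroy_instance(session, instance_uuid, datastore_path, deallocate_network):
    LOG.debug("[instance: %s] Powering off the VM", instance_uuid)
    session.invoke("PowerOffVM_Task", vm=instance_uuid)

    LOG.debug("[instance: %s] Unregistering the VM", instance_uuid)
    session.invoke("UnregisterVM", vm=instance_uuid)

    LOG.debug("Deleting the datastore file %s", datastore_path)
    session.invoke("DeleteDatastoreFile_Task", datastorePath=datastore_path)

    LOG.info("[instance: %s] Instance destroyed", instance_uuid)
    # Network cleanup happens last, which is why the "Deallocating network
    # for instance" / deallocate_for_instance() lines close out each teardown.
    deallocate_network(instance_uuid)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance(
        FakeVMwareSession(),
        "39421358-2d66-4fbe-a4e0-8fdb0b420c5e",
        "[datastore1] 39421358-2d66-4fbe-a4e0-8fdb0b420c5e",
        deallocate_network=lambda uuid: LOG.debug(
            "[instance: %s] Deallocating network for instance", uuid),
    )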
[ 737.243936] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.243936] env[69927]: DEBUG nova.compute.manager [-] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 737.243936] env[69927]: DEBUG nova.network.neutron [-] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.250150] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b15cb0-bc3c-4eec-8850-5eced4ebf5e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.258590] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ad9ae5-aa20-466f-87a3-fd0d1100df71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.300063] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbc8843-eae6-4f75-b1bf-c63e29d0bf28 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.309976] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8ee76d-8484-498e-90af-8f0ece48801f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.327182] env[69927]: DEBUG nova.compute.provider_tree [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.406672] env[69927]: DEBUG nova.objects.base [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Object Instance<9348e368-cc3c-4bde-91ae-26fd03ad536a> lazy-loaded attributes: info_cache,migration_context {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 737.406858] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc312de-2b17-429b-8a83-4f5e99fca167 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.429989] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c669b016-387b-45e2-a12e-8056a50dfe31 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.437982] env[69927]: DEBUG oslo_vmware.api [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 737.437982] env[69927]: value = 
"session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f94806-bb11-d454-4237-c66ccd399485" [ 737.437982] env[69927]: _type = "Task" [ 737.437982] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.448715] env[69927]: DEBUG oslo_vmware.api [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f94806-bb11-d454-4237-c66ccd399485, 'name': SearchDatastore_Task, 'duration_secs': 0.008421} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.449014] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.520168] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095584, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.523320] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.523320] env[69927]: DEBUG nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Instance network_info: |[{"id": "f02a1745-61d5-4414-88fe-680d9e7bba72", "address": "fa:16:3e:cb:2a:4d", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf02a1745-61", "ovs_interfaceid": "f02a1745-61d5-4414-88fe-680d9e7bba72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 737.523593] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 
tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:2a:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d650b26-c3e7-4de7-98db-5e4b816d123a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f02a1745-61d5-4414-88fe-680d9e7bba72', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.533251] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Creating folder: Project (5e5c81a6491a4090b807b7328df7d8ae). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.534011] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a90e4473-8f19-4df5-8a53-491ccdcf7418 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.540608] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.543114] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.546136] env[69927]: INFO nova.compute.manager [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Took 33.67 seconds to build instance. [ 737.551826] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Created folder: Project (5e5c81a6491a4090b807b7328df7d8ae) in parent group-v811283. [ 737.551826] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Creating folder: Instances. Parent ref: group-v811356. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.551826] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e873881-1b70-40d1-b4ce-84eac2920cf2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.563474] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Created folder: Instances in parent group-v811356. [ 737.567024] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.567024] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.567024] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f0ca403-5aa2-4efe-8e9a-edbc94e9a2da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.590608] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.590608] env[69927]: value = "task-4095589" [ 737.590608] env[69927]: _type = "Task" [ 737.590608] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.603939] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095589, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.654088] env[69927]: DEBUG oslo_vmware.api [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347673} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.654240] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 737.654380] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 737.654596] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 737.655079] env[69927]: INFO nova.compute.manager [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Took 0.62 seconds to destroy the instance on the hypervisor. [ 737.655259] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.655592] env[69927]: DEBUG nova.compute.manager [-] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 737.655720] env[69927]: DEBUG nova.network.neutron [-] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.830980] env[69927]: DEBUG nova.scheduler.client.report [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.019582] env[69927]: DEBUG oslo_vmware.api [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095584, 'name': PowerOnVM_Task, 'duration_secs': 0.853798} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.021845] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.022105] env[69927]: INFO nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Took 10.97 seconds to spawn the instance on the hypervisor. 
[ 738.022299] env[69927]: DEBUG nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 738.023286] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f10860-533d-4438-933a-4576ce7a1692 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.052143] env[69927]: DEBUG oslo_concurrency.lockutils [None req-95631204-6542-4289-8a8c-d325cd822d87 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "e8e80be6-a82f-4cc5-92fd-366badf519b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.099s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.053640] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.054260] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.054260] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.054393] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.054850] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.054850] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.055771] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 738.055771] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.104914] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095589, 'name': CreateVM_Task, 'duration_secs': 0.379663} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.104914] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 738.106442] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.106442] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.106442] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 738.106725] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f85437c3-0886-4078-93e6-3f1128097f13 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.114860] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 738.114860] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d24100-4f81-5b3f-c83b-a6284b6683a1" [ 738.114860] env[69927]: _type = "Task" [ 738.114860] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.127171] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d24100-4f81-5b3f-c83b-a6284b6683a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.151890] env[69927]: DEBUG nova.network.neutron [-] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.195360] env[69927]: DEBUG nova.network.neutron [-] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.336432] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.793s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.336960] env[69927]: DEBUG nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 738.341247] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.524s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.344373] env[69927]: INFO nova.compute.claims [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.429720] env[69927]: DEBUG nova.compute.manager [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Received event network-changed-f02a1745-61d5-4414-88fe-680d9e7bba72 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 738.429720] env[69927]: DEBUG nova.compute.manager [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Refreshing instance network info cache due to event network-changed-f02a1745-61d5-4414-88fe-680d9e7bba72. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 738.429720] env[69927]: DEBUG oslo_concurrency.lockutils [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] Acquiring lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.429720] env[69927]: DEBUG oslo_concurrency.lockutils [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] Acquired lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.429720] env[69927]: DEBUG nova.network.neutron [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Refreshing network info cache for port f02a1745-61d5-4414-88fe-680d9e7bba72 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.531959] env[69927]: DEBUG nova.network.neutron [-] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.547496] env[69927]: INFO nova.compute.manager [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Took 37.43 seconds to build instance. [ 738.556902] env[69927]: DEBUG nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 738.565156] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.626036] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d24100-4f81-5b3f-c83b-a6284b6683a1, 'name': SearchDatastore_Task, 'duration_secs': 0.013053} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.626417] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.626662] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.626894] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.627049] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.627310] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.627603] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8b526c3-5aa6-4542-99e4-ec663386b1cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.638329] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.638574] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 738.639557] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a539a00e-0393-4772-aabe-fe6aa9fec43a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.647863] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 738.647863] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5283e5e5-4409-dea2-a401-3d4684b448ee" [ 738.647863] env[69927]: _type = "Task" [ 738.647863] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.655112] env[69927]: INFO nova.compute.manager [-] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Took 1.47 seconds to deallocate network for instance. [ 738.666405] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5283e5e5-4409-dea2-a401-3d4684b448ee, 'name': SearchDatastore_Task, 'duration_secs': 0.012636} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.667830] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bba6f3cd-a252-4d97-8082-4504b0233be4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.679632] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 738.679632] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244770a-3693-890d-4ba2-78be35e85364" [ 738.679632] env[69927]: _type = "Task" [ 738.679632] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.690047] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244770a-3693-890d-4ba2-78be35e85364, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.697303] env[69927]: INFO nova.compute.manager [-] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Took 1.45 seconds to deallocate network for instance. [ 738.850731] env[69927]: DEBUG nova.compute.utils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 738.855950] env[69927]: DEBUG nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 738.856148] env[69927]: DEBUG nova.network.neutron [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.901820] env[69927]: DEBUG nova.policy [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f7d56a8bd67480588bfffededc24e32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a204748f06124b1ba0d9657b87dc3591', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 739.034751] env[69927]: INFO nova.compute.manager [-] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Took 1.38 seconds to deallocate network for instance. [ 739.050653] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2baa917f-d7d1-4510-a71c-29b6a2d7e699 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.167s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.068796] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "b1bcbcfb-2320-434c-901f-0f6a476a3069" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.069225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.069652] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "b1bcbcfb-2320-434c-901f-0f6a476a3069-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.070083] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.070595] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.079187] env[69927]: INFO nova.compute.manager [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Terminating instance [ 739.099969] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.168491] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.189828] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244770a-3693-890d-4ba2-78be35e85364, 'name': SearchDatastore_Task, 'duration_secs': 0.029597} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.190130] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.190392] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/77c6ce9e-5e15-41e4-aa81-1ef01248aa32.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 739.190659] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09985fb1-d72f-4aff-b058-9e69a4ce49ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.199346] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 739.199346] env[69927]: value = "task-4095590" [ 739.199346] env[69927]: _type = "Task" [ 739.199346] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.208731] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.214866] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.278114] env[69927]: DEBUG nova.network.neutron [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Updated VIF entry in instance network info cache for port f02a1745-61d5-4414-88fe-680d9e7bba72. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 739.278759] env[69927]: DEBUG nova.network.neutron [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Updating instance_info_cache with network_info: [{"id": "f02a1745-61d5-4414-88fe-680d9e7bba72", "address": "fa:16:3e:cb:2a:4d", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf02a1745-61", "ovs_interfaceid": "f02a1745-61d5-4414-88fe-680d9e7bba72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.357117] env[69927]: DEBUG nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 739.379958] env[69927]: DEBUG nova.network.neutron [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Successfully created port: fef5adda-8c0b-4f55-8d26-a44ba11e36ce {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.535551] env[69927]: DEBUG nova.compute.manager [None req-c8e08e06-5cde-4eec-8128-047e9e127a09 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 739.536773] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544c1d43-e3dd-43a2-8d74-34a87b8b1f95 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.547425] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.553262] env[69927]: DEBUG nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 739.584648] env[69927]: DEBUG nova.compute.manager [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 739.584648] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 739.588632] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d17fd5c-92c5-4f9d-bcc6-e61010b11529 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.602490] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.602490] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fd85618-799c-47a1-a088-5a59045a7272 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.612965] env[69927]: DEBUG oslo_vmware.api [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 739.612965] env[69927]: value = "task-4095591" [ 739.612965] env[69927]: _type = "Task" [ 739.612965] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.626480] env[69927]: DEBUG oslo_vmware.api [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095591, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.713344] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095590, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.736766] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.737559] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.738043] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.738578] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.739205] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.744521] env[69927]: INFO nova.compute.manager [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Terminating instance [ 739.781869] env[69927]: DEBUG oslo_concurrency.lockutils [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] Releasing lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.782244] env[69927]: DEBUG nova.compute.manager [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Received event network-vif-deleted-85ea851a-ed88-452f-acd1-8e025fdeaeb0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.782471] env[69927]: DEBUG nova.compute.manager [req-bad72edc-695b-4201-aad0-f5b2f8e1b639 
req-66b5867c-7e23-4e41-beda-3238aa45f51a service nova] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Received event network-vif-deleted-ee4c8bc2-33ea-425e-adbc-5a75ab428943 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.988419] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "e8e80be6-a82f-4cc5-92fd-366badf519b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.988856] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "e8e80be6-a82f-4cc5-92fd-366badf519b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.989169] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "e8e80be6-a82f-4cc5-92fd-366badf519b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.990394] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "e8e80be6-a82f-4cc5-92fd-366badf519b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.990394] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "e8e80be6-a82f-4cc5-92fd-366badf519b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.993857] env[69927]: INFO nova.compute.manager [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Terminating instance [ 740.059951] env[69927]: INFO nova.compute.manager [None req-c8e08e06-5cde-4eec-8128-047e9e127a09 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] instance snapshotting [ 740.061159] env[69927]: DEBUG nova.objects.instance [None req-c8e08e06-5cde-4eec-8128-047e9e127a09 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lazy-loading 'flavor' on Instance uuid e8e80be6-a82f-4cc5-92fd-366badf519b8 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 740.083390] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 
tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.098969] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4aa8a86-b8bd-4fd2-a7d4-a68fe1765c28 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.109975] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd54e8f-8de7-4cd1-938e-39760802af3b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.124261] env[69927]: DEBUG oslo_vmware.api [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095591, 'name': PowerOffVM_Task, 'duration_secs': 0.381058} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.153505] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 740.153505] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 740.153505] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f62c10e5-a0cb-48f3-b90f-5874ddfb2128 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.155743] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02475386-841d-4e45-bf4f-9281dbfd7a05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.164274] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6abf3d7-cd44-4734-a996-97fa64a1b1a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.957480] env[69927]: DEBUG nova.compute.manager [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 740.957761] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.958953] env[69927]: DEBUG nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 740.961477] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "refresh_cache-e8e80be6-a82f-4cc5-92fd-366badf519b8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.961876] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquired lock "refresh_cache-e8e80be6-a82f-4cc5-92fd-366badf519b8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.961876] env[69927]: DEBUG nova.network.neutron [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.966931] env[69927]: DEBUG nova.compute.provider_tree [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.974030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f260b3c1-5b5d-4410-82c0-105cce26d652 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.979417] env[69927]: DEBUG nova.compute.manager [req-0710638e-ada6-4ab8-9c1b-e044895dc235 req-5256e196-e364-44c5-ab0a-5ad175df63b2 service nova] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Received event network-vif-deleted-e4473f09-99e5-4ac3-b9cc-3316e06abd65 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 740.980799] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905583ab-09e4-488d-a5b2-83827af50b17 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.983514] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Unregistered the VM {{(pid=69927) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 740.983717] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 740.983942] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleting the datastore file [datastore1] b1bcbcfb-2320-434c-901f-0f6a476a3069 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.985758] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59488ee4-9c26-4ee7-84e0-78a30c60e6ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.009668] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095590, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642895} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.014385] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c150ecf4-02f6-446a-ad79-7b82ed3bce71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.017521] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/77c6ce9e-5e15-41e4-aa81-1ef01248aa32.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 741.017792] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 741.018522] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 741.021036] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 741.021298] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.021499] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 741.021679] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.022134] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 741.022134] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 741.022332] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 741.022675] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 741.022675] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 741.023426] env[69927]: DEBUG nova.virt.hardware 
[None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 741.023426] env[69927]: DEBUG nova.virt.hardware [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 741.023835] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fa07b24-37fb-4e22-b5cc-8af204808302 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.027064] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f20043c4-466f-407e-a246-32456f8efea6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.028859] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd5b00b-9c6c-4078-8b8a-6ecc06abaca2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.034040] env[69927]: DEBUG oslo_vmware.api [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 741.034040] env[69927]: value = "task-4095593" [ 741.034040] env[69927]: _type = "Task" [ 741.034040] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.035323] env[69927]: DEBUG nova.network.neutron [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Successfully updated port: fef5adda-8c0b-4f55-8d26-a44ba11e36ce {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 741.051815] env[69927]: DEBUG oslo_vmware.api [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 741.051815] env[69927]: value = "task-4095595" [ 741.051815] env[69927]: _type = "Task" [ 741.051815] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.052114] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 741.052114] env[69927]: value = "task-4095594" [ 741.052114] env[69927]: _type = "Task" [ 741.052114] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.069925] env[69927]: DEBUG oslo_vmware.api [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095593, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.072505] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db27da5-c6ee-4aeb-b263-bef89e5cb392 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.085531] env[69927]: DEBUG oslo_vmware.api [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095595, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.086065] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095594, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.485806] env[69927]: DEBUG nova.scheduler.client.report [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.492514] env[69927]: DEBUG nova.network.neutron [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.565431] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "refresh_cache-cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.565431] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquired lock "refresh_cache-cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.565431] env[69927]: DEBUG nova.network.neutron [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.565431] env[69927]: DEBUG oslo_vmware.api [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303316} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.565431] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.565909] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.565909] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.565909] env[69927]: INFO nova.compute.manager [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Took 1.97 seconds to destroy the instance on the hypervisor. [ 741.565909] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.565909] env[69927]: DEBUG nova.compute.manager [-] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 741.566240] env[69927]: DEBUG nova.network.neutron [-] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.566240] env[69927]: DEBUG nova.compute.manager [None req-c8e08e06-5cde-4eec-8128-047e9e127a09 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Instance disappeared during snapshot {{(pid=69927) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 741.581212] env[69927]: DEBUG oslo_vmware.api [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095595, 'name': PowerOffVM_Task, 'duration_secs': 0.367835} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.582387] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095594, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081993} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.585505] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 741.585590] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 741.585894] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.586394] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5e951d1-d3b1-4ad1-9166-16820b45c6d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.592753] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068ccf2f-48b4-48f9-a96d-96c75ed7afef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.609692] env[69927]: DEBUG nova.network.neutron [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 
tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.638133] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/77c6ce9e-5e15-41e4-aa81-1ef01248aa32.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.644240] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f9d31e5-7271-4e54-946a-e0a63aeba4f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.670445] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 741.670445] env[69927]: value = "task-4095597" [ 741.670445] env[69927]: _type = "Task" [ 741.670445] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.686215] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095597, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.700576] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 741.700988] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 741.701260] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleting the datastore file [datastore1] ab8a8acc-cab7-4a82-bd90-b34147f17b0e {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 741.701650] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca35eba8-ffbc-4a21-901f-df8ece79a489 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.711241] env[69927]: DEBUG oslo_vmware.api [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for the task: (returnval){ [ 741.711241] env[69927]: value = 
"task-4095598" [ 741.711241] env[69927]: _type = "Task" [ 741.711241] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.726500] env[69927]: DEBUG oslo_vmware.api [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.846727] env[69927]: DEBUG nova.compute.manager [None req-c8e08e06-5cde-4eec-8128-047e9e127a09 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Found 0 images (rotation: 2) {{(pid=69927) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 741.991740] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.650s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.992378] env[69927]: DEBUG nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 741.996089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.275s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.996449] env[69927]: DEBUG nova.objects.instance [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lazy-loading 'resources' on Instance uuid 8edafb98-331a-45b8-8de8-4ba04b035ffd {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 742.116171] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Releasing lock "refresh_cache-e8e80be6-a82f-4cc5-92fd-366badf519b8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.116171] env[69927]: DEBUG nova.compute.manager [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 742.116171] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.116171] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a5db2e-5284-463a-b9ac-f3aa6ccf2a56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.125388] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 742.125663] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fcc04b8-7c70-4d6b-8c82-e82d01d0bad1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.135048] env[69927]: DEBUG oslo_vmware.api [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 742.135048] env[69927]: value = "task-4095599" [ 742.135048] env[69927]: _type = "Task" [ 742.135048] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.139792] env[69927]: DEBUG nova.network.neutron [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.146197] env[69927]: DEBUG oslo_vmware.api [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.187284] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095597, 'name': ReconfigVM_Task, 'duration_secs': 0.317608} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.187284] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/77c6ce9e-5e15-41e4-aa81-1ef01248aa32.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.187284] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2f70630-0b1f-44f3-8eee-d9c2783f925e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.194181] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 742.194181] env[69927]: value = "task-4095600" [ 742.194181] env[69927]: _type = "Task" [ 742.194181] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.203789] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095600, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.224915] env[69927]: DEBUG oslo_vmware.api [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Task: {'id': task-4095598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177628} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.225031] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.225293] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 742.225623] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.225914] env[69927]: INFO nova.compute.manager [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Took 1.27 seconds to destroy the instance on the hypervisor. 
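The records above show the recurring vSphere task lifecycle in this log: a call returns a task reference (task-4095593 through task-4095598 here), the API layer logs "Waiting for the task ... to complete", polls it ("progress is 0%"), and finally reports "completed successfully" together with a duration_secs. A minimal sketch of that poll-until-terminal pattern follows; it is not the oslo.vmware implementation, and fetch_task_info, poll_interval and TaskFailed are invented names used purely for illustration.

    import time

    class TaskFailed(Exception):
        """Hypothetical error for a task that ends in an error state or times out."""

    def wait_for_task(task_ref, fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a task until it reaches a terminal state, mirroring the
        'Waiting for the task' / 'progress is N%' / 'completed successfully'
        sequence in the log records above.

        fetch_task_info(task_ref) is assumed to return a dict like
        {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str}.
        """
        started = time.monotonic()
        while True:
            info = fetch_task_info(task_ref)
            if info['state'] == 'success':
                duration = time.monotonic() - started
                print(f"Task {task_ref} completed successfully in {duration:.6f}s")
                return info
            if info['state'] == 'error':
                raise TaskFailed(f"Task {task_ref} failed: {info.get('error')}")
            if time.monotonic() - started > timeout:
                raise TaskFailed(f"Task {task_ref} timed out after {timeout}s")
            # Still running: report progress and back off before polling again,
            # like the periodic 'progress is N%' DEBUG lines.
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)

    # Example with a stub that succeeds immediately:
    # wait_for_task('task-4095598', lambda ref: {'state': 'success', 'progress': 100})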
[ 742.226404] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.226767] env[69927]: DEBUG nova.compute.manager [-] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 742.226887] env[69927]: DEBUG nova.network.neutron [-] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.461182] env[69927]: DEBUG nova.network.neutron [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Updating instance_info_cache with network_info: [{"id": "fef5adda-8c0b-4f55-8d26-a44ba11e36ce", "address": "fa:16:3e:60:db:c0", "network": {"id": "ab70d14c-f879-411a-88f1-96eef5d67694", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-684521697-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a204748f06124b1ba0d9657b87dc3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfef5adda-8c", "ovs_interfaceid": "fef5adda-8c0b-4f55-8d26-a44ba11e36ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.500640] env[69927]: DEBUG nova.compute.utils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 742.504719] env[69927]: DEBUG nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 742.504719] env[69927]: DEBUG nova.network.neutron [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 742.634967] env[69927]: DEBUG nova.network.neutron [-] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.647640] env[69927]: DEBUG oslo_vmware.api [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095599, 'name': PowerOffVM_Task, 'duration_secs': 0.225329} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.647640] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 742.647640] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 742.648260] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-205962bd-b5bc-4586-b818-4af9ec8ef514 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.682818] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 742.683049] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 742.687129] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Deleting the datastore file [datastore1] e8e80be6-a82f-4cc5-92fd-366badf519b8 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.687486] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a62ebed1-5d9d-4e8a-84e8-b8ae09b6a2e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.698020] env[69927]: DEBUG oslo_vmware.api [None req-553b9690-6e05-442a-8855-c1c9b1961032 
tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for the task: (returnval){ [ 742.698020] env[69927]: value = "task-4095602" [ 742.698020] env[69927]: _type = "Task" [ 742.698020] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.710656] env[69927]: DEBUG oslo_vmware.api [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095602, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.720020] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095600, 'name': Rename_Task, 'duration_secs': 0.158976} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.720312] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.720802] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0543f53f-84b7-4521-adab-fd9783a57556 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.730214] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 742.730214] env[69927]: value = "task-4095603" [ 742.730214] env[69927]: _type = "Task" [ 742.730214] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.740334] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095603, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.820131] env[69927]: DEBUG nova.compute.manager [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Received event network-vif-plugged-fef5adda-8c0b-4f55-8d26-a44ba11e36ce {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 742.820436] env[69927]: DEBUG oslo_concurrency.lockutils [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] Acquiring lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.820570] env[69927]: DEBUG oslo_concurrency.lockutils [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.823022] env[69927]: DEBUG oslo_concurrency.lockutils [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.823022] env[69927]: DEBUG nova.compute.manager [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] No waiting events found dispatching network-vif-plugged-fef5adda-8c0b-4f55-8d26-a44ba11e36ce {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 742.823022] env[69927]: WARNING nova.compute.manager [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Received unexpected event network-vif-plugged-fef5adda-8c0b-4f55-8d26-a44ba11e36ce for instance with vm_state building and task_state spawning. [ 742.823022] env[69927]: DEBUG nova.compute.manager [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Received event network-changed-fef5adda-8c0b-4f55-8d26-a44ba11e36ce {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 742.823022] env[69927]: DEBUG nova.compute.manager [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Refreshing instance network info cache due to event network-changed-fef5adda-8c0b-4f55-8d26-a44ba11e36ce. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 742.823410] env[69927]: DEBUG oslo_concurrency.lockutils [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] Acquiring lock "refresh_cache-cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.836103] env[69927]: DEBUG nova.policy [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86abcc2a262949b48a37cc485021ff0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65f02266005e44b1a2d8d2eddec9795b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.964298] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Releasing lock "refresh_cache-cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.964668] env[69927]: DEBUG nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Instance network_info: |[{"id": "fef5adda-8c0b-4f55-8d26-a44ba11e36ce", "address": "fa:16:3e:60:db:c0", "network": {"id": "ab70d14c-f879-411a-88f1-96eef5d67694", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-684521697-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a204748f06124b1ba0d9657b87dc3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfef5adda-8c", "ovs_interfaceid": "fef5adda-8c0b-4f55-8d26-a44ba11e36ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 742.970035] env[69927]: DEBUG oslo_concurrency.lockutils [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] Acquired lock "refresh_cache-cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.970260] env[69927]: DEBUG nova.network.neutron [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 
service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Refreshing network info cache for port fef5adda-8c0b-4f55-8d26-a44ba11e36ce {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.972202] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:db:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fef5adda-8c0b-4f55-8d26-a44ba11e36ce', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.981421] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Creating folder: Project (a204748f06124b1ba0d9657b87dc3591). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.982893] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3692924-8750-4911-b5e4-92807670a9b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.995595] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Created folder: Project (a204748f06124b1ba0d9657b87dc3591) in parent group-v811283. [ 742.995847] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Creating folder: Instances. Parent ref: group-v811359. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.996085] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1320556b-3e7d-48e8-a27e-0f58ed371b03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.005569] env[69927]: DEBUG nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.010827] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Created folder: Instances in parent group-v811359. [ 743.010827] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.011840] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 743.011840] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84e9d5ab-723f-47ee-afdf-0f8d7a547066 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.039069] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.039069] env[69927]: value = "task-4095606" [ 743.039069] env[69927]: _type = "Task" [ 743.039069] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.051942] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095606, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.143058] env[69927]: INFO nova.compute.manager [-] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Took 1.59 seconds to deallocate network for instance. [ 743.201487] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9def9d9a-b1b1-48fd-a718-ab7cca4afa08 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.222775] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35dec54-9377-4859-95a9-d1d506a305c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.223296] env[69927]: DEBUG oslo_vmware.api [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Task: {'id': task-4095602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303199} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.225711] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.225711] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.225711] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.225711] env[69927]: INFO nova.compute.manager [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 743.225711] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.226268] env[69927]: DEBUG nova.compute.manager [-] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 743.226310] env[69927]: DEBUG nova.network.neutron [-] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.256586] env[69927]: DEBUG nova.network.neutron [-] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.262604] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba807f8-a150-4e81-bd6a-e8fd22a1e013 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.275837] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4823b365-ba74-4c6b-95a2-6f0a11691aa0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.280330] env[69927]: DEBUG oslo_vmware.api [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095603, 'name': PowerOnVM_Task, 'duration_secs': 0.530516} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.280945] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.281208] env[69927]: INFO nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Took 8.11 seconds to spawn the instance on the hypervisor. [ 743.281390] env[69927]: DEBUG nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 743.283383] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb96346-3819-4476-84a1-000a83c4e565 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.294430] env[69927]: DEBUG nova.compute.provider_tree [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.353768] env[69927]: DEBUG nova.network.neutron [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Successfully created port: 0d02a2be-1a9c-48c3-93c8-28b312303384 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.510150] env[69927]: DEBUG nova.network.neutron [-] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.555752] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095606, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.655094] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.774550] env[69927]: DEBUG nova.network.neutron [-] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.788210] env[69927]: DEBUG nova.network.neutron [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Updated VIF entry in instance network info cache for port fef5adda-8c0b-4f55-8d26-a44ba11e36ce. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 743.788607] env[69927]: DEBUG nova.network.neutron [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Updating instance_info_cache with network_info: [{"id": "fef5adda-8c0b-4f55-8d26-a44ba11e36ce", "address": "fa:16:3e:60:db:c0", "network": {"id": "ab70d14c-f879-411a-88f1-96eef5d67694", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-684521697-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a204748f06124b1ba0d9657b87dc3591", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfef5adda-8c", "ovs_interfaceid": "fef5adda-8c0b-4f55-8d26-a44ba11e36ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.797542] env[69927]: DEBUG nova.scheduler.client.report [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.811169] env[69927]: INFO nova.compute.manager [None req-79168f04-755b-436c-b335-23e7fa566ab2 
tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Took 37.31 seconds to build instance. [ 744.013700] env[69927]: INFO nova.compute.manager [-] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Took 1.79 seconds to deallocate network for instance. [ 744.021789] env[69927]: DEBUG nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 744.059130] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.059130] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.059130] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.059313] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.059313] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.063674] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.064060] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 
tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.064194] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.064345] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.064504] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.064671] env[69927]: DEBUG nova.virt.hardware [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.065594] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b04eed-fbe3-4ab5-8d99-9af5e7d443c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.075654] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095606, 'name': CreateVM_Task, 'duration_secs': 0.595976} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.076293] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 744.077477] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.077477] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.077785] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 744.078062] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d65b700-b007-47d8-a900-5ba0d4f041fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.085213] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caecb11d-de6f-40b1-a73b-0e08741e95ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.090845] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 744.090845] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529801fd-ca04-af8c-1597-6ebd5e68671e" [ 744.090845] env[69927]: _type = "Task" [ 744.090845] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.108548] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529801fd-ca04-af8c-1597-6ebd5e68671e, 'name': SearchDatastore_Task, 'duration_secs': 0.014085} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.108846] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.109093] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.109323] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.109468] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.109642] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.109897] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f7b353b-9b12-4cad-9937-7e0b263762a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.119112] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.119304] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 744.119991] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab01cb09-e0e2-433d-8fda-df65deffb691 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.126124] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 744.126124] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f7877-8906-5412-1526-a62b8e3490ec" [ 744.126124] env[69927]: _type = "Task" [ 744.126124] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.137720] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f7877-8906-5412-1526-a62b8e3490ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.277015] env[69927]: INFO nova.compute.manager [-] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Took 1.05 seconds to deallocate network for instance. [ 744.294599] env[69927]: DEBUG oslo_concurrency.lockutils [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] Releasing lock "refresh_cache-cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.294784] env[69927]: DEBUG nova.compute.manager [req-f9dd832b-717b-41b0-b6be-596f8d093ef8 req-d047b579-5e67-4037-a7f6-98b02bffbdd2 service nova] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Received event network-vif-deleted-5aeee040-2531-4d68-871a-2f65a93ad448 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 744.302919] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.307s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.305409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.898s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.309267] env[69927]: INFO nova.compute.claims [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.313650] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79168f04-755b-436c-b335-23e7fa566ab2 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock 
"77c6ce9e-5e15-41e4-aa81-1ef01248aa32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.188s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.349909] env[69927]: INFO nova.scheduler.client.report [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Deleted allocations for instance 8edafb98-331a-45b8-8de8-4ba04b035ffd [ 744.522531] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.643019] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f7877-8906-5412-1526-a62b8e3490ec, 'name': SearchDatastore_Task, 'duration_secs': 0.012763} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.645258] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d43b32eb-4767-43ec-931c-b3db639efa35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.656480] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 744.656480] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5261ecac-acde-7844-16fd-ffec589ef694" [ 744.656480] env[69927]: _type = "Task" [ 744.656480] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.668947] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5261ecac-acde-7844-16fd-ffec589ef694, 'name': SearchDatastore_Task, 'duration_secs': 0.010914} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.669738] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.670136] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] cdf0ea6e-d884-49c1-87ec-cd6de1376c7f/cdf0ea6e-d884-49c1-87ec-cd6de1376c7f.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.670816] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b5b49c3-c8d9-4d02-849d-d6c968e364b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.679891] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 744.679891] env[69927]: value = "task-4095607" [ 744.679891] env[69927]: _type = "Task" [ 744.679891] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.693036] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.788794] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.818582] env[69927]: DEBUG nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.862797] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfd5e155-ee25-4f2c-bec7-8904915f25e2 tempest-ServerDiagnosticsTest-2111870055 tempest-ServerDiagnosticsTest-2111870055-project-member] Lock "8edafb98-331a-45b8-8de8-4ba04b035ffd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.761s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.981659] env[69927]: DEBUG nova.compute.manager [req-72aa6355-e179-4a3a-851a-d3d4f952d1f2 req-75805697-7ad6-4693-8251-e18e6e1e2cdc service nova] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Received event network-vif-deleted-50ea3bc5-fa5d-49db-99d9-e842cb85c0d8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 745.141966] env[69927]: DEBUG nova.network.neutron [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Successfully updated port: 0d02a2be-1a9c-48c3-93c8-28b312303384 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.151358] env[69927]: INFO nova.compute.manager [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Rescuing [ 745.151358] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.151528] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.151687] env[69927]: DEBUG nova.network.neutron [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.193892] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471325} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.193892] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] cdf0ea6e-d884-49c1-87ec-cd6de1376c7f/cdf0ea6e-d884-49c1-87ec-cd6de1376c7f.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 745.193892] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.194175] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7560874-2b65-4c91-94bf-6dbcaf72672d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.204161] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 745.204161] env[69927]: value = "task-4095608" [ 745.204161] env[69927]: _type = "Task" [ 745.204161] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.214657] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095608, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.350841] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.648039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "refresh_cache-14359034-232d-478f-bf65-cf9937c59229" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.648039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired lock "refresh_cache-14359034-232d-478f-bf65-cf9937c59229" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.648039] env[69927]: DEBUG nova.network.neutron [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.720755] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095608, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.225742} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.720755] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.721026] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377ab6ad-e6c0-4579-bdbc-4f11a45b4cd4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.751912] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] cdf0ea6e-d884-49c1-87ec-cd6de1376c7f/cdf0ea6e-d884-49c1-87ec-cd6de1376c7f.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.757236] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b602887-8f11-4245-8c96-001eea94c558 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.784704] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 745.784704] env[69927]: value = "task-4095609" [ 745.784704] env[69927]: _type = "Task" [ 745.784704] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.801141] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095609, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.930416] env[69927]: DEBUG nova.network.neutron [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Updating instance_info_cache with network_info: [{"id": "f02a1745-61d5-4414-88fe-680d9e7bba72", "address": "fa:16:3e:cb:2a:4d", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf02a1745-61", "ovs_interfaceid": "f02a1745-61d5-4414-88fe-680d9e7bba72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.000874] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843e2f97-bf73-4f41-ab78-bd0c60f5bab7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.010876] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7041e71f-a94d-47b8-9f4a-5f2362748581 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.044405] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1aa3fe-d7f5-4883-a733-359478cb4e5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.055718] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2272bf0-6704-439c-8e63-6eb86a177476 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.070441] env[69927]: DEBUG nova.compute.provider_tree [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.203318] env[69927]: DEBUG nova.network.neutron [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.297442] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095609, 'name': ReconfigVM_Task, 'duration_secs': 0.418745} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.297984] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Reconfigured VM instance instance-00000018 to attach disk [datastore2] cdf0ea6e-d884-49c1-87ec-cd6de1376c7f/cdf0ea6e-d884-49c1-87ec-cd6de1376c7f.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.299164] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03f5d53d-23e3-4147-81f0-a5ab891a886a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.318027] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 746.318027] env[69927]: value = "task-4095610" [ 746.318027] env[69927]: _type = "Task" [ 746.318027] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.329326] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095610, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.388475] env[69927]: DEBUG nova.network.neutron [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Updating instance_info_cache with network_info: [{"id": "0d02a2be-1a9c-48c3-93c8-28b312303384", "address": "fa:16:3e:e8:5d:df", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d02a2be-1a", "ovs_interfaceid": "0d02a2be-1a9c-48c3-93c8-28b312303384", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.434113] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "refresh_cache-77c6ce9e-5e15-41e4-aa81-1ef01248aa32" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.573421] env[69927]: DEBUG nova.scheduler.client.report [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 746.662857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "8de4160d-2282-4ed3-bdf0-349445a6eab8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.663109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.825242] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095610, 'name': Rename_Task, 'duration_secs': 0.247755} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.825513] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.825781] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-760728e9-a57a-4aee-9cc5-be0d33001acc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.832725] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 746.832725] env[69927]: value = "task-4095611" [ 746.832725] env[69927]: _type = "Task" [ 746.832725] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.841929] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095611, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.890698] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Releasing lock "refresh_cache-14359034-232d-478f-bf65-cf9937c59229" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.891036] env[69927]: DEBUG nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Instance network_info: |[{"id": "0d02a2be-1a9c-48c3-93c8-28b312303384", "address": "fa:16:3e:e8:5d:df", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d02a2be-1a", "ovs_interfaceid": "0d02a2be-1a9c-48c3-93c8-28b312303384", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 746.891776] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:5d:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d02a2be-1a9c-48c3-93c8-28b312303384', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.900260] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.900521] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14359034-232d-478f-bf65-cf9937c59229] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 746.900757] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac73567b-ca86-40b3-92ac-367b8ab58920 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.922080] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 746.922080] env[69927]: value = "task-4095612" [ 746.922080] env[69927]: _type = "Task" [ 746.922080] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.931626] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095612, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.058506] env[69927]: DEBUG nova.compute.manager [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Received event network-vif-plugged-0d02a2be-1a9c-48c3-93c8-28b312303384 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.058866] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] Acquiring lock "14359034-232d-478f-bf65-cf9937c59229-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.063493] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] Lock "14359034-232d-478f-bf65-cf9937c59229-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.063493] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] Lock "14359034-232d-478f-bf65-cf9937c59229-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.063493] env[69927]: DEBUG nova.compute.manager [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] No waiting events found dispatching network-vif-plugged-0d02a2be-1a9c-48c3-93c8-28b312303384 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 747.063493] env[69927]: WARNING nova.compute.manager [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Received unexpected event network-vif-plugged-0d02a2be-1a9c-48c3-93c8-28b312303384 for instance with vm_state building and task_state spawning. 
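Every vSphere operation in these entries follows the same oslo.vmware pattern: a SOAP method such as Folder.CreateVM_Task or VirtualMachine.PowerOnVM_Task is invoked through the shared VMwareAPISession, the returned task object is handed to wait_for_task(), and _poll_task logs progress until the task completes or fails. Below is a minimal Python sketch of that pattern using only the public oslo.vmware API; the host, credentials, and the vm_ref managed-object reference are placeholders, not values taken from this log.

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder connection parameters -- not taken from this log.
    session = api.VMwareAPISession(
        'vcenter.example.test',         # host
        'administrator@vsphere.local',  # server_username
        'secret',                       # server_password
        3,                              # api_retry_count
        0.5)                            # task_poll_interval in seconds

    # Retrieve up to 100 VirtualMachine managed objects; this is the same
    # kind of PropertyCollector.RetrievePropertiesEx call seen throughout
    # the log above.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)

    # Start an asynchronous vSphere task on one VM and block until it
    # finishes; wait_for_task() polls the task and raises if it errors.
    vm_ref = result.objects[0].obj      # hypothetical target VM
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

wait_for_task() polls the task from a looping call while the caller waits, which is why each *_Task entry in this log is followed by one or more "progress is N%" lines before the "completed successfully" line.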
[ 747.063493] env[69927]: DEBUG nova.compute.manager [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Received event network-changed-0d02a2be-1a9c-48c3-93c8-28b312303384 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.063689] env[69927]: DEBUG nova.compute.manager [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Refreshing instance network info cache due to event network-changed-0d02a2be-1a9c-48c3-93c8-28b312303384. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 747.063689] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] Acquiring lock "refresh_cache-14359034-232d-478f-bf65-cf9937c59229" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.063689] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] Acquired lock "refresh_cache-14359034-232d-478f-bf65-cf9937c59229" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.063689] env[69927]: DEBUG nova.network.neutron [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Refreshing network info cache for port 0d02a2be-1a9c-48c3-93c8-28b312303384 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.081193] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.775s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.082299] env[69927]: DEBUG nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 747.087876] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.079s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.091949] env[69927]: INFO nova.compute.claims [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.343318] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095611, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.432474] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095612, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.591276] env[69927]: DEBUG nova.compute.utils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 747.591389] env[69927]: DEBUG nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 747.591662] env[69927]: DEBUG nova.network.neutron [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.661399] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.661886] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.668302] env[69927]: DEBUG nova.policy [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9961f0c39b32467b88878373a3374aae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de0b560d18954fd68f7eceeb96c37055', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 747.845034] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095611, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.923346] env[69927]: DEBUG nova.network.neutron [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Updated VIF entry in instance network info cache for port 0d02a2be-1a9c-48c3-93c8-28b312303384. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 747.923812] env[69927]: DEBUG nova.network.neutron [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Updating instance_info_cache with network_info: [{"id": "0d02a2be-1a9c-48c3-93c8-28b312303384", "address": "fa:16:3e:e8:5d:df", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d02a2be-1a", "ovs_interfaceid": "0d02a2be-1a9c-48c3-93c8-28b312303384", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.943241] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095612, 'name': CreateVM_Task, 'duration_secs': 0.892061} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.943513] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14359034-232d-478f-bf65-cf9937c59229] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 747.944389] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.944584] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.944995] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 747.945341] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3c7c539-5b2c-4e36-84b4-d90ed6698d5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.953795] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 747.953795] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5214c64f-ca38-5430-9505-6fcea0d8c4de" [ 747.953795] env[69927]: _type = "Task" [ 747.953795] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.964896] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5214c64f-ca38-5430-9505-6fcea0d8c4de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.980294] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.980960] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a8b2c12-c3f7-45ac-b22b-b7839a8f1599 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.991490] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 747.991490] env[69927]: value = "task-4095613" [ 747.991490] env[69927]: _type = "Task" [ 747.991490] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.002420] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095613, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.095187] env[69927]: DEBUG nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 748.146526] env[69927]: DEBUG nova.network.neutron [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Successfully created port: 2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.344216] env[69927]: DEBUG oslo_vmware.api [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095611, 'name': PowerOnVM_Task, 'duration_secs': 1.204737} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.346973] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 748.347330] env[69927]: INFO nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Took 7.39 seconds to spawn the instance on the hypervisor. 
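The "Acquiring lock / Acquired lock / Releasing lock" triplets around the devstack-image-cache_base path above come from oslo.concurrency's lockutils guarding the image-cache check before SearchDatastore_Task runs. A minimal sketch of that locking pattern follows; the lock name is taken from the log, but the function bodies are placeholders and not the driver's actual code.

```python
# Sketch of the oslo.concurrency lock pattern behind the
# "Acquiring/Acquired/Releasing lock" lines above. The work done inside the
# critical sections is a placeholder, not Nova's implementation.
from oslo_concurrency import lockutils

IMAGE_CACHE_LOCK = ("[datastore2] devstack-image-cache_base/"
                    "f524494e-9179-4b3e-a3e2-782f019def24")


def check_image_cached():
    # Context-manager form; lockutils logs the acquire/release DEBUG lines
    # similar to those seen in the log.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        # ... here the driver would search the datastore and decide whether
        # the cached VMDK needs to be fetched or copied ...
        return True


# Decorator form, serializing callers on a shared name such as the
# "compute_resources" lock that appears elsewhere in the log.
@lockutils.synchronized("compute_resources")
def claim_resources():
    # ... resource-tracker style work, one caller at a time per lock name ...
    pass
```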
[ 748.347594] env[69927]: DEBUG nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 748.349419] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2d2d50-215f-49c5-86ce-714cdf02b4c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.434298] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ae4e357-d9ea-4580-b60d-383007c2f08a req-677d633b-a401-4ddf-87f3-f5dcc9d098aa service nova] Releasing lock "refresh_cache-14359034-232d-478f-bf65-cf9937c59229" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.467707] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5214c64f-ca38-5430-9505-6fcea0d8c4de, 'name': SearchDatastore_Task, 'duration_secs': 0.010619} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.468187] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.468508] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.468804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.468957] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.469158] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.469413] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-4b4a7c00-01a9-4ccf-806e-d9d008c6ccc4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.480034] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.480245] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.481072] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf0f7797-6273-4c19-b325-8579677b7de0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.487249] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 748.487249] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bbb061-0a77-c1a2-11be-0d683c8ce86a" [ 748.487249] env[69927]: _type = "Task" [ 748.487249] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.505573] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095613, 'name': PowerOffVM_Task, 'duration_secs': 0.217148} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.505819] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bbb061-0a77-c1a2-11be-0d683c8ce86a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.508482] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.509886] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772374b0-4cc4-41c3-a921-29ab938e7ea9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.533286] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd89131-ccd8-4905-92f1-28628d588147 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.578791] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.579711] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3523b336-49fa-4176-9766-4d49a73a519d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.588481] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 748.588481] env[69927]: value = "task-4095614" [ 748.588481] env[69927]: _type = "Task" [ 748.588481] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.602063] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 748.602063] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.602063] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.602063] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.602365] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.604647] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aa5a2a7-c3a4-4140-991e-42cfebc257af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.623461] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.623671] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.624611] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-420fd5b1-49cd-4bec-a19f-0feff0385f70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.634535] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 748.634535] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520df4c0-36bb-222f-f571-c410f8def6dd" [ 748.634535] env[69927]: _type = "Task" [ 748.634535] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.644629] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520df4c0-36bb-222f-f571-c410f8def6dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.752021] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890f47fb-7350-4218-b971-539650a72628 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.759519] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300e1714-8f71-43fd-a08b-3c930991a9b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.790395] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5419ae24-0a7d-4e06-8364-6a66fd74cd6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.798822] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c8c0f9-2295-4938-a462-3f4f4af5539a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.813958] env[69927]: DEBUG nova.compute.provider_tree [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.868577] env[69927]: INFO nova.compute.manager [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Took 39.24 seconds to build instance. [ 748.999986] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bbb061-0a77-c1a2-11be-0d683c8ce86a, 'name': SearchDatastore_Task, 'duration_secs': 0.02877} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.000591] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6825ac44-46b7-4f9c-8b1a-eee4c7c8ec3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.006893] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 749.006893] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522776ae-3260-5f61-6e64-069f4a85c6df" [ 749.006893] env[69927]: _type = "Task" [ 749.006893] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.015086] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522776ae-3260-5f61-6e64-069f4a85c6df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.107942] env[69927]: DEBUG nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 749.133555] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 749.133825] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.134020] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 749.134216] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 
tempest-AttachVolumeTestJSON-218421131-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.134366] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 749.134513] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 749.134724] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 749.134882] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 749.135115] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 749.135301] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 749.135477] env[69927]: DEBUG nova.virt.hardware [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 749.136844] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568f6727-0029-4664-b389-05b8c6048396 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.151612] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520df4c0-36bb-222f-f571-c410f8def6dd, 'name': SearchDatastore_Task, 'duration_secs': 0.036131} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.152953] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506183d7-8e1e-4e1c-8bcc-aa52635424e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.158037] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c485a6af-5c44-41ff-8feb-10eb0a4394a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.173944] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 749.173944] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf1e35-d319-4571-4c4c-2195405fd6de" [ 749.173944] env[69927]: _type = "Task" [ 749.173944] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.183282] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf1e35-d319-4571-4c4c-2195405fd6de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.317695] env[69927]: DEBUG nova.scheduler.client.report [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.371979] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b02a913c-18df-48d1-8fcd-b9e6f3fba380 tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.887s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.518768] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522776ae-3260-5f61-6e64-069f4a85c6df, 'name': SearchDatastore_Task, 'duration_secs': 0.015761} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.519273] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.519306] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 14359034-232d-478f-bf65-cf9937c59229/14359034-232d-478f-bf65-cf9937c59229.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.519594] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5c300c1-3cfc-4e3b-92e3-d06df434d1e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.528274] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 749.528274] env[69927]: value = "task-4095615" [ 749.528274] env[69927]: _type = "Task" [ 749.528274] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.538702] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095615, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.565534] env[69927]: DEBUG nova.compute.manager [req-9e8cf326-c6b5-404c-867a-1c47ecf64d67 req-a2c06a71-09d3-4b2b-b3ee-aeb36bf399b0 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Received event network-vif-plugged-2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 749.565754] env[69927]: DEBUG oslo_concurrency.lockutils [req-9e8cf326-c6b5-404c-867a-1c47ecf64d67 req-a2c06a71-09d3-4b2b-b3ee-aeb36bf399b0 service nova] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.565959] env[69927]: DEBUG oslo_concurrency.lockutils [req-9e8cf326-c6b5-404c-867a-1c47ecf64d67 req-a2c06a71-09d3-4b2b-b3ee-aeb36bf399b0 service nova] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.566205] env[69927]: DEBUG oslo_concurrency.lockutils [req-9e8cf326-c6b5-404c-867a-1c47ecf64d67 req-a2c06a71-09d3-4b2b-b3ee-aeb36bf399b0 service nova] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.566319] env[69927]: DEBUG nova.compute.manager [req-9e8cf326-c6b5-404c-867a-1c47ecf64d67 req-a2c06a71-09d3-4b2b-b3ee-aeb36bf399b0 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] No waiting events found dispatching network-vif-plugged-2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 749.566481] env[69927]: WARNING nova.compute.manager [req-9e8cf326-c6b5-404c-867a-1c47ecf64d67 req-a2c06a71-09d3-4b2b-b3ee-aeb36bf399b0 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Received unexpected event network-vif-plugged-2d989e8c-d768-494a-a866-4da8ff809d05 for instance with vm_state building and task_state spawning. 
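The network-vif-plugged / network-changed entries above are external instance events sent by Neutron; when no waiter has been registered yet (the instance is still building), the compute manager logs the "Received unexpected event" WARNING instead of dispatching it. The sketch below reduces that prepare-then-dispatch flow to plain threading primitives; `ExternalEventWaiter` and its methods are illustrative names only, not Nova's classes.

```python
# Simplified illustration of the pop_instance_event flow seen in the log:
# a waiter is registered before an operation that depends on a Neutron event;
# an arriving event either completes the waiter or is reported as unexpected.
import threading


class ExternalEventWaiter:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before starting the operation."""
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when an external event arrives."""
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            # Mirrors the WARNING above: nothing was waiting for this event.
            print("Received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            ev.set()


waiter = ExternalEventWaiter()
waiter.dispatch("f6972b90-7746-4a37-8be8-1739f96dc3dc",
                "network-vif-plugged-2d989e8c-d768-494a-a866-4da8ff809d05")
```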
[ 749.586768] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.586988] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.587221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.587398] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.587564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.590670] env[69927]: INFO nova.compute.manager [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Terminating instance [ 749.664118] env[69927]: DEBUG nova.network.neutron [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Successfully updated port: 2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.687808] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf1e35-d319-4571-4c4c-2195405fd6de, 'name': SearchDatastore_Task, 'duration_secs': 0.020356} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.688400] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.688668] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. {{(pid=69927) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 749.688962] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7429e257-fb95-4911-80dc-7a098b0ee126 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.698377] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 749.698377] env[69927]: value = "task-4095616" [ 749.698377] env[69927]: _type = "Task" [ 749.698377] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.709292] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095616, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.823341] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.736s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.823951] env[69927]: DEBUG nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.827726] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.720s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.828167] env[69927]: DEBUG nova.objects.instance [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lazy-loading 'resources' on Instance uuid 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 749.873320] env[69927]: DEBUG nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 750.043243] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468942} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.043647] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 14359034-232d-478f-bf65-cf9937c59229/14359034-232d-478f-bf65-cf9937c59229.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.044212] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.044436] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-575534be-0db1-4748-b9c6-ab8887b62550 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.055348] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 750.055348] env[69927]: value = "task-4095617" [ 750.055348] env[69927]: _type = "Task" [ 750.055348] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.067565] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095617, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.095632] env[69927]: DEBUG nova.compute.manager [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.096015] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.097446] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67d0346-16d7-49c5-8f39-26a8d3f0fc6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.109172] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.109485] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50ef8717-a48c-4ab1-89cd-a5d2787cbb11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.118579] env[69927]: DEBUG oslo_vmware.api [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 750.118579] env[69927]: value = "task-4095618" [ 750.118579] env[69927]: _type = "Task" [ 750.118579] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.130057] env[69927]: DEBUG oslo_vmware.api [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095618, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.167662] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.167881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.168115] env[69927]: DEBUG nova.network.neutron [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.214831] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095616, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.332024] env[69927]: DEBUG nova.compute.utils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.332024] env[69927]: DEBUG nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 750.393995] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.565785] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109276} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.568380] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.569345] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c08ce5e-f7eb-4497-adc2-1c52c4d475e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.590821] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 14359034-232d-478f-bf65-cf9937c59229/14359034-232d-478f-bf65-cf9937c59229.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.593443] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2564b2b-595a-42e9-b0a2-e4cb7685ee31 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.615471] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 750.615471] env[69927]: value = "task-4095619" [ 750.615471] env[69927]: _type = "Task" [ 750.615471] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.627839] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095619, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.632355] env[69927]: DEBUG oslo_vmware.api [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095618, 'name': PowerOffVM_Task, 'duration_secs': 0.295032} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.632615] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.632804] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 750.633038] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10d2efec-caf2-43a7-914f-3452026c1c8c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.707474] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.707700] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.707888] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Deleting the datastore file [datastore2] cdf0ea6e-d884-49c1-87ec-cd6de1376c7f {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.708711] env[69927]: DEBUG nova.network.neutron [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.710528] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39048c52-9e5b-42cd-89b9-ebe56c00ec03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.716321] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095616, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678353} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.716403] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. [ 750.717185] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b76fcf-ca20-49cd-a79a-d8ef8828352c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.723476] env[69927]: DEBUG oslo_vmware.api [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for the task: (returnval){ [ 750.723476] env[69927]: value = "task-4095621" [ 750.723476] env[69927]: _type = "Task" [ 750.723476] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.747380] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.752671] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82ce0d89-98e1-44c8-b275-b0602f69ab87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.769749] env[69927]: DEBUG oslo_vmware.api [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095621, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.775907] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 750.775907] env[69927]: value = "task-4095622" [ 750.775907] env[69927]: _type = "Task" [ 750.775907] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.787160] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095622, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.840161] env[69927]: DEBUG nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.898713] env[69927]: DEBUG nova.network.neutron [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating instance_info_cache with network_info: [{"id": "2d989e8c-d768-494a-a866-4da8ff809d05", "address": "fa:16:3e:02:b9:e7", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d989e8c-d7", "ovs_interfaceid": "2d989e8c-d768-494a-a866-4da8ff809d05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.965918] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04eae748-4e6b-43b4-93e9-7ff56bfb5d24 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.974697] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bd569b-50a4-42b3-b455-c0e5e1d4afcb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.008246] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a912cf-f1da-4f97-b79d-7f5c6c93ba40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.016337] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25c51ef-3e05-43f1-bb81-35164affe0c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.030722] env[69927]: DEBUG nova.compute.provider_tree [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.127119] env[69927]: DEBUG 
oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095619, 'name': ReconfigVM_Task, 'duration_secs': 0.353451} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.127440] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 14359034-232d-478f-bf65-cf9937c59229/14359034-232d-478f-bf65-cf9937c59229.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.128130] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad692c9d-9e5b-4e22-9c6a-d35d7598a1a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.136428] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 751.136428] env[69927]: value = "task-4095623" [ 751.136428] env[69927]: _type = "Task" [ 751.136428] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.146388] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095623, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.234773] env[69927]: DEBUG oslo_vmware.api [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Task: {'id': task-4095621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182118} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.235072] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.235267] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.235447] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.235624] env[69927]: INFO nova.compute.manager [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 751.235877] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.236152] env[69927]: DEBUG nova.compute.manager [-] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 751.236257] env[69927]: DEBUG nova.network.neutron [-] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.286822] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095622, 'name': ReconfigVM_Task, 'duration_secs': 0.322122} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.287761] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.288146] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f83ce7-a0d1-434d-91a4-f8557dd58ca9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.318407] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f01faf0e-b626-40a3-9fe5-69dd2a378ed8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.338192] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 751.338192] env[69927]: value = "task-4095624" [ 751.338192] env[69927]: _type = "Task" [ 751.338192] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.349948] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095624, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.401640] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.405864] env[69927]: DEBUG nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Instance network_info: |[{"id": "2d989e8c-d768-494a-a866-4da8ff809d05", "address": "fa:16:3e:02:b9:e7", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d989e8c-d7", "ovs_interfaceid": "2d989e8c-d768-494a-a866-4da8ff809d05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.406655] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:b9:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d989e8c-d768-494a-a866-4da8ff809d05', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.415475] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Creating folder: Project (de0b560d18954fd68f7eceeb96c37055). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.416405] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1c815ec-0b4d-49b3-a4bd-6ac5d54c002e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.429205] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Created folder: Project (de0b560d18954fd68f7eceeb96c37055) in parent group-v811283. [ 751.429535] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Creating folder: Instances. Parent ref: group-v811363. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.429870] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a23a831-e93c-4c65-a7a7-0a19d3131bd1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.442814] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Created folder: Instances in parent group-v811363. [ 751.443326] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.443582] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.443804] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91b981cd-1924-450f-806a-4c450a897d69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.465941] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.465941] env[69927]: value = "task-4095627" [ 751.465941] env[69927]: _type = "Task" [ 751.465941] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.474771] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095627, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.535027] env[69927]: DEBUG nova.scheduler.client.report [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.647593] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095623, 'name': Rename_Task, 'duration_secs': 0.247706} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.647938] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.648218] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5de5113a-19be-4312-b6b6-5fb398639676 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.655987] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 751.655987] env[69927]: value = "task-4095628" [ 751.655987] env[69927]: _type = "Task" [ 751.655987] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.664085] env[69927]: DEBUG nova.compute.manager [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Received event network-changed-2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.664369] env[69927]: DEBUG nova.compute.manager [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Refreshing instance network info cache due to event network-changed-2d989e8c-d768-494a-a866-4da8ff809d05. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 751.664595] env[69927]: DEBUG oslo_concurrency.lockutils [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] Acquiring lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.664748] env[69927]: DEBUG oslo_concurrency.lockutils [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] Acquired lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.664893] env[69927]: DEBUG nova.network.neutron [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Refreshing network info cache for port 2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.674557] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095628, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.846277] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095624, 'name': ReconfigVM_Task, 'duration_secs': 0.171448} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.846579] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.846830] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3df4b356-0ef1-4f83-9327-b0980a5b731d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.853385] env[69927]: DEBUG nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.857894] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 751.857894] env[69927]: value = "task-4095629" [ 751.857894] env[69927]: _type = "Task" [ 751.857894] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.866385] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095629, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.883411] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.883672] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.883828] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.884051] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.884215] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.884368] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.884582] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.884743] env[69927]: DEBUG nova.virt.hardware [None 
req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.884952] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.885160] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.885335] env[69927]: DEBUG nova.virt.hardware [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.886288] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43862a66-222c-4157-b861-78e6769cac15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.894578] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dafe671-35ce-478b-aa71-0644c55a4c15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.909108] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.914783] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Creating folder: Project (21921648fd0f4055bd2c499097b4e1bf). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.915179] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7e02975-d9ae-402d-801e-7e16c1a3f2bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.926245] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Created folder: Project (21921648fd0f4055bd2c499097b4e1bf) in parent group-v811283. [ 751.926477] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Creating folder: Instances. Parent ref: group-v811366. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.926725] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9aeab82b-836d-4168-8b6c-3509231f7b80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.939394] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Created folder: Instances in parent group-v811366. [ 751.940060] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.940060] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.940192] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8f3a96d-6251-4cc9-8f10-d37581d0055a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.959466] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.959466] env[69927]: value = "task-4095632" [ 751.959466] env[69927]: _type = "Task" [ 751.959466] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.968178] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095632, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.977492] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095627, 'name': CreateVM_Task, 'duration_secs': 0.39023} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.977706] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.978460] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.978881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.979009] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 751.979236] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be83f856-30b9-431d-ae79-68f3e1a4854c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.984900] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 751.984900] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d8dcdc-7dff-3b22-2a81-76bf8fd15081" [ 751.984900] env[69927]: _type = "Task" [ 751.984900] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.994853] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d8dcdc-7dff-3b22-2a81-76bf8fd15081, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.041107] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.044030] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.708s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.045576] env[69927]: INFO nova.compute.claims [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.049887] env[69927]: DEBUG nova.network.neutron [-] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.079688] env[69927]: INFO nova.scheduler.client.report [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Deleted allocations for instance 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85 [ 752.172477] env[69927]: DEBUG oslo_vmware.api [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095628, 'name': PowerOnVM_Task, 'duration_secs': 0.497558} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.173328] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.173328] env[69927]: INFO nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Took 8.15 seconds to spawn the instance on the hypervisor. 
[ 752.173328] env[69927]: DEBUG nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 752.174818] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28576c8-d6b6-43f9-a5e7-aa2638b53e1c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.370654] env[69927]: DEBUG oslo_vmware.api [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095629, 'name': PowerOnVM_Task, 'duration_secs': 0.473257} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.370917] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.373774] env[69927]: DEBUG nova.compute.manager [None req-e79bd188-8d58-44e5-bb93-0b2e7820be48 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 752.374593] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ea51e5-e961-4c1f-93b9-4b67317f53e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.399649] env[69927]: DEBUG nova.network.neutron [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updated VIF entry in instance network info cache for port 2d989e8c-d768-494a-a866-4da8ff809d05. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.400125] env[69927]: DEBUG nova.network.neutron [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating instance_info_cache with network_info: [{"id": "2d989e8c-d768-494a-a866-4da8ff809d05", "address": "fa:16:3e:02:b9:e7", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d989e8c-d7", "ovs_interfaceid": "2d989e8c-d768-494a-a866-4da8ff809d05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.469900] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095632, 'name': CreateVM_Task, 'duration_secs': 0.354763} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.470193] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 752.470521] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.495070] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d8dcdc-7dff-3b22-2a81-76bf8fd15081, 'name': SearchDatastore_Task, 'duration_secs': 0.012284} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.495366] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.495597] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.495836] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.495981] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.496176] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.496450] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.496747] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 752.496971] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8f25f1e-078d-4c01-8bd3-30efb6dbdc54 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.499014] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87c9dfa7-1b43-4353-b9b3-75ebc875b51a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.504555] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 
tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 752.504555] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ad06ed-b9cc-52f6-f3e6-f23a49e9ee79" [ 752.504555] env[69927]: _type = "Task" [ 752.504555] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.509153] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.509368] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.512814] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4811b13-17f7-4767-bb6b-8241c4da6824 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.515133] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ad06ed-b9cc-52f6-f3e6-f23a49e9ee79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.518502] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 752.518502] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529fb712-b26d-11bc-4985-d87698c23b5a" [ 752.518502] env[69927]: _type = "Task" [ 752.518502] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.527599] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529fb712-b26d-11bc-4985-d87698c23b5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.556017] env[69927]: INFO nova.compute.manager [-] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Took 1.32 seconds to deallocate network for instance. 
[ 752.590534] env[69927]: DEBUG oslo_concurrency.lockutils [None req-814eb623-7e27-4088-b956-b1522e1ab3b6 tempest-ServerExternalEventsTest-2037619620 tempest-ServerExternalEventsTest-2037619620-project-member] Lock "6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.958s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.697851] env[69927]: INFO nova.compute.manager [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Took 40.91 seconds to build instance. [ 752.903752] env[69927]: DEBUG oslo_concurrency.lockutils [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] Releasing lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.903752] env[69927]: DEBUG nova.compute.manager [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Received event network-vif-deleted-fef5adda-8c0b-4f55-8d26-a44ba11e36ce {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.904525] env[69927]: INFO nova.compute.manager [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Neutron deleted interface fef5adda-8c0b-4f55-8d26-a44ba11e36ce; detaching it from the instance and deleting it from the info cache [ 752.904525] env[69927]: DEBUG nova.network.neutron [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.016260] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ad06ed-b9cc-52f6-f3e6-f23a49e9ee79, 'name': SearchDatastore_Task, 'duration_secs': 0.023673} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.016585] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.016825] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.017052] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.031609] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529fb712-b26d-11bc-4985-d87698c23b5a, 'name': SearchDatastore_Task, 'duration_secs': 0.012805} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.033064] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7710727d-c58a-4131-97c6-6857db902566 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.039849] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 753.039849] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52502534-d142-6db2-52bd-3d3e96e371f5" [ 753.039849] env[69927]: _type = "Task" [ 753.039849] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.049900] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52502534-d142-6db2-52bd-3d3e96e371f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.063507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.202635] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db62ef13-d828-4299-a55c-038b49f2de70 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "14359034-232d-478f-bf65-cf9937c59229" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.239s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.409137] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29ea749e-dd0c-40f9-8d89-cc4ddf5c84e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.420806] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6f159e-9bdc-4362-b7a8-b77298768caa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.458644] env[69927]: DEBUG nova.compute.manager [req-3fa46856-1631-4bec-97cf-47808b522e9e req-bad48145-832f-4c7f-aae2-d4ec513e2dda service nova] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Detach interface failed, port_id=fef5adda-8c0b-4f55-8d26-a44ba11e36ce, reason: Instance cdf0ea6e-d884-49c1-87ec-cd6de1376c7f could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 753.552716] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52502534-d142-6db2-52bd-3d3e96e371f5, 'name': SearchDatastore_Task, 'duration_secs': 0.030141} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.552998] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.553276] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] f6972b90-7746-4a37-8be8-1739f96dc3dc/f6972b90-7746-4a37-8be8-1739f96dc3dc.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.553592] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.554152] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.554152] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dcafcde1-e07f-45e0-b571-022065d9bedb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.559067] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-339d1cc0-2667-4fcc-ac3f-8ae651f67686 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.570213] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 753.570213] env[69927]: value = "task-4095633" [ 753.570213] env[69927]: _type = "Task" [ 753.570213] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.571627] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.571809] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.575474] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-118b31cb-22fb-4c3a-894f-77d40504e8ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.584712] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 753.584712] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fac349-af5c-e26c-cd9f-76dd30a69b77" [ 753.584712] env[69927]: _type = "Task" [ 753.584712] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.587668] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095633, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.600969] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fac349-af5c-e26c-cd9f-76dd30a69b77, 'name': SearchDatastore_Task, 'duration_secs': 0.010275} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.601615] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1413325d-ed3e-4da9-924b-b42dae0f1cd7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.607964] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 753.607964] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b46ad-ff95-39ef-cce1-6b8efc09dec5" [ 753.607964] env[69927]: _type = "Task" [ 753.607964] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.624054] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b46ad-ff95-39ef-cce1-6b8efc09dec5, 'name': SearchDatastore_Task, 'duration_secs': 0.010043} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.624464] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.624822] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] a4249857-6f60-4040-b676-d2d19dc83f15/a4249857-6f60-4040-b676-d2d19dc83f15.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.628448] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0261c1fc-8f99-4182-b9e9-d6d3a3779997 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.638253] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 753.638253] env[69927]: value = "task-4095634" [ 753.638253] env[69927]: _type = "Task" [ 753.638253] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.645151] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0126d28-b5ba-401c-9dc1-cac77322f084 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.655516] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa13853b-9ce8-415e-b985-ad206038b816 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.659034] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.688218] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70838778-bf13-411d-b183-282412c14fc6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.697016] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed77b37-23d3-4d50-b6d7-eab12ca17440 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.711913] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 753.715472] env[69927]: DEBUG nova.compute.provider_tree [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.961899] env[69927]: DEBUG nova.compute.manager [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 753.963768] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0de39a-43f7-4dee-b22d-f7216246361e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.082882] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095633, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507894} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.083440] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] f6972b90-7746-4a37-8be8-1739f96dc3dc/f6972b90-7746-4a37-8be8-1739f96dc3dc.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.086017] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.086017] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a691c45b-ec06-419a-bc39-469d7d0ddbab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.094593] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 754.094593] env[69927]: value = "task-4095635" [ 754.094593] env[69927]: _type = "Task" [ 754.094593] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.107830] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095635, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.153761] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095634, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.222206] env[69927]: DEBUG nova.scheduler.client.report [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 754.251167] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.478910] env[69927]: INFO nova.compute.manager [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] instance snapshotting [ 754.484176] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "6be47dcb-ce00-4b81-9e69-35acabac046e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.484491] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.485303] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe40282e-cdce-4049-b31e-1ce5dd5cba1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.506027] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90a9b67-562a-4514-9dd0-de8d9065e692 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.606132] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 
tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095635, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15004} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.606436] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.608507] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df59c9a-1fee-4a43-a303-9dc301f19c47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.630792] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] f6972b90-7746-4a37-8be8-1739f96dc3dc/f6972b90-7746-4a37-8be8-1739f96dc3dc.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.631528] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a020a5b-8ac6-4845-9d74-c39d576fc74c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.657092] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.778556} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.658717] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] a4249857-6f60-4040-b676-d2d19dc83f15/a4249857-6f60-4040-b676-d2d19dc83f15.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.658964] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.659318] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 754.659318] env[69927]: value = "task-4095636" [ 754.659318] env[69927]: _type = "Task" [ 754.659318] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.659508] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e5b50be-ee2c-4502-bb8f-1695b321e3e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.674144] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095636, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.674458] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 754.674458] env[69927]: value = "task-4095637" [ 754.674458] env[69927]: _type = "Task" [ 754.674458] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.733695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.734333] env[69927]: DEBUG nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 754.737336] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.956s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.737579] env[69927]: DEBUG nova.objects.instance [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lazy-loading 'resources' on Instance uuid ee422a46-c6e4-4098-8f74-b9f0779d0fba {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.019158] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 755.019448] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e1be2185-d128-41dc-8bce-2d5f3c1f8723 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.029684] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 755.029684] env[69927]: value = "task-4095638" [ 755.029684] env[69927]: _type = "Task" [ 755.029684] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.046218] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.173028] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.184402] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095637, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18416} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.184698] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.185742] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86c0154-4d0f-4f9a-b52e-d9cd6c80bd69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.208202] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] a4249857-6f60-4040-b676-d2d19dc83f15/a4249857-6f60-4040-b676-d2d19dc83f15.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.208202] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8ded64d-ef88-4d21-8166-2325d7d4c1af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.232090] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 755.232090] env[69927]: value = "task-4095639" [ 755.232090] env[69927]: _type = "Task" [ 755.232090] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.242259] env[69927]: DEBUG nova.compute.utils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 755.246825] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095639, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.247789] env[69927]: DEBUG nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 755.248032] env[69927]: DEBUG nova.network.neutron [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 755.305372] env[69927]: DEBUG nova.policy [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de8b1b11969a4feb818dc682d2fec552', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61b1aea0ccf049c8942ba32932412497', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 755.543406] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.674791] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095636, 'name': ReconfigVM_Task, 'duration_secs': 0.676245} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.675712] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Reconfigured VM instance instance-0000001a to attach disk [datastore1] f6972b90-7746-4a37-8be8-1739f96dc3dc/f6972b90-7746-4a37-8be8-1739f96dc3dc.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.676410] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59460034-2d13-4477-b243-1807ed99f120 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.684196] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 755.684196] env[69927]: value = "task-4095640" [ 755.684196] env[69927]: _type = "Task" [ 755.684196] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.694480] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095640, 'name': Rename_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.743388] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095639, 'name': ReconfigVM_Task, 'duration_secs': 0.31245} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.746665] env[69927]: DEBUG nova.network.neutron [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Successfully created port: 9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.748562] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Reconfigured VM instance instance-0000001b to attach disk [datastore1] a4249857-6f60-4040-b676-d2d19dc83f15/a4249857-6f60-4040-b676-d2d19dc83f15.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.749428] env[69927]: DEBUG nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 755.756069] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efef0ece-309c-484f-9a17-53d01790b6be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.764823] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 755.764823] env[69927]: value = "task-4095641" [ 755.764823] env[69927]: _type = "Task" [ 755.764823] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.777899] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095641, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.895606] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a2697d-cf17-4fa3-a99b-ec6f5ce92419 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.905119] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43206e57-8274-416e-9a9b-20bc4d561adf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.949140] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b17e452-447a-4d2d-b295-62ed75d9dc7b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.959450] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b606e46-a305-4142-9e91-ee27de0b439b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.978680] env[69927]: DEBUG nova.compute.provider_tree [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.043248] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.200341] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095640, 'name': Rename_Task, 'duration_secs': 0.341763} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.200341] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.200341] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac45f3d7-9abf-4d4f-ab80-7057f2ceb4a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.206668] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 756.206668] env[69927]: value = "task-4095642" [ 756.206668] env[69927]: _type = "Task" [ 756.206668] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.217529] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.282219] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095641, 'name': Rename_Task, 'duration_secs': 0.150889} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.282630] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.282921] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6ee4996-19f5-4956-a339-8b14e8115e82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.293134] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 756.293134] env[69927]: value = "task-4095643" [ 756.293134] env[69927]: _type = "Task" [ 756.293134] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.304153] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095643, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.486083] env[69927]: DEBUG nova.scheduler.client.report [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 756.543221] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095638, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.723054] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095642, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.774166] env[69927]: DEBUG nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 756.805694] env[69927]: DEBUG oslo_vmware.api [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095643, 'name': PowerOnVM_Task, 'duration_secs': 0.448031} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.808650] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 756.808927] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.809106] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 756.809441] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.809597] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 756.809744] 
env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 756.809953] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 756.810165] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 756.810396] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 756.810576] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 756.810744] env[69927]: DEBUG nova.virt.hardware [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 756.811164] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.811378] env[69927]: INFO nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Took 4.96 seconds to spawn the instance on the hypervisor. 
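The nova.virt.hardware lines just above enumerate CPU topologies for a one-vCPU flavor under the logged limits (sockets=65536, cores=65536, threads=65536) and end with the single candidate VirtCPUTopology(cores=1,sockets=1,threads=1). The snippet below re-derives that result with a toy divisor search; it is an illustration of the constraint, not Nova's implementation.

```python
# Toy re-derivation of the topology search logged by nova.virt.hardware:
# enumerate (sockets, cores, threads) whose product equals the vCPU count
# and which stay within the given limits. Not Nova's code, just the idea.
from typing import Iterator


def possible_topologies(vcpus: int,
                        max_sockets: int,
                        max_cores: int,
                        max_threads: int) -> Iterator[tuple[int, int, int]]:
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield (sockets, cores, threads)


if __name__ == "__main__":
    # m1.nano above has vcpus=1 and limits 65536:65536:65536, so the only
    # candidate is (1, 1, 1), matching "Got 1 possible topologies".
    print(list(possible_topologies(1, 65536, 65536, 65536)))
```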
[ 756.811684] env[69927]: DEBUG nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 756.812491] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c075ac-bf27-42e1-9ac5-1e693157da40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.815827] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d16b64-c080-4842-8dbb-40ba00d89836 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.830885] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8a2311-1b71-4902-b966-89d54b781e76 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.991787] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.254s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.996113] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.773s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.996771] env[69927]: INFO nova.compute.claims [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.019809] env[69927]: INFO nova.scheduler.client.report [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Deleted allocations for instance ee422a46-c6e4-4098-8f74-b9f0779d0fba [ 757.046953] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095638, 'name': CreateSnapshot_Task, 'duration_secs': 1.772744} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.047954] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 757.051085] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff86e17-e692-4852-b3f9-012687bf320e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.221150] env[69927]: DEBUG oslo_vmware.api [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095642, 'name': PowerOnVM_Task, 'duration_secs': 0.643057} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.221527] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.221742] env[69927]: INFO nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Took 8.11 seconds to spawn the instance on the hypervisor. [ 757.221962] env[69927]: DEBUG nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 757.222990] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71196124-45fe-4146-9807-1c98f881303a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.352177] env[69927]: INFO nova.compute.manager [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Took 37.37 seconds to build instance. 
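Throughout this stretch the lockutils entries account for contention explicitly, e.g. "acquired ... :: waited 23.956s" and "released ... :: held 2.254s" on the shared "compute_resources" lock. The context manager below reproduces that waited/held bookkeeping for named in-process locks; it only illustrates the semantics of those log lines and is not oslo.concurrency.

```python
# Illustration of the waited/held accounting in the lockutils entries above
# ("acquired ... :: waited Xs", "released ... :: held Ys") using plain
# threading locks. This mimics the log semantics; it is not oslo.concurrency.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

# Lock registry keyed by name (creation races are ignored for this sketch).
_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)


@contextmanager
def named_lock(name: str, owner: str):
    lock = _locks[name]
    wait_start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - wait_start
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - held_start
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)  # stand-in for claiming resources under the lock
```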
[ 757.499593] env[69927]: DEBUG nova.network.neutron [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Successfully updated port: 9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.530843] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8f51541-4798-4b80-84a3-7e4417867cf0 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996 tempest-FloatingIPsAssociationNegativeTestJSON-1933372996-project-member] Lock "ee422a46-c6e4-4098-8f74-b9f0779d0fba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.306s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.569991] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 757.570357] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e8dc3c85-2564-40be-a881-40612b18d65c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.580784] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 757.580784] env[69927]: value = "task-4095644" [ 757.580784] env[69927]: _type = "Task" [ 757.580784] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.590035] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095644, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.740324] env[69927]: DEBUG nova.compute.manager [req-cafd019d-e40e-4273-9d98-9ebdde1becee req-9fc767ea-7cc1-4c06-8b09-7e3eb07853a0 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Received event network-vif-plugged-9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 757.740766] env[69927]: DEBUG oslo_concurrency.lockutils [req-cafd019d-e40e-4273-9d98-9ebdde1becee req-9fc767ea-7cc1-4c06-8b09-7e3eb07853a0 service nova] Acquiring lock "9d83dda3-5fb1-416d-9307-faeef454efec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.740984] env[69927]: DEBUG oslo_concurrency.lockutils [req-cafd019d-e40e-4273-9d98-9ebdde1becee req-9fc767ea-7cc1-4c06-8b09-7e3eb07853a0 service nova] Lock "9d83dda3-5fb1-416d-9307-faeef454efec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.742250] env[69927]: DEBUG oslo_concurrency.lockutils [req-cafd019d-e40e-4273-9d98-9ebdde1becee req-9fc767ea-7cc1-4c06-8b09-7e3eb07853a0 service nova] Lock "9d83dda3-5fb1-416d-9307-faeef454efec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.742250] env[69927]: DEBUG nova.compute.manager [req-cafd019d-e40e-4273-9d98-9ebdde1becee req-9fc767ea-7cc1-4c06-8b09-7e3eb07853a0 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] No waiting events found dispatching network-vif-plugged-9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 757.742250] env[69927]: WARNING nova.compute.manager [req-cafd019d-e40e-4273-9d98-9ebdde1becee req-9fc767ea-7cc1-4c06-8b09-7e3eb07853a0 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Received unexpected event network-vif-plugged-9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 for instance with vm_state building and task_state spawning. [ 757.750020] env[69927]: INFO nova.compute.manager [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Took 43.39 seconds to build instance. 
[ 757.856734] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0232a1e7-ccdd-493f-963d-ddeb17fe47f9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "a4249857-6f60-4040-b676-d2d19dc83f15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.501s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.007017] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-9d83dda3-5fb1-416d-9307-faeef454efec" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.007017] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-9d83dda3-5fb1-416d-9307-faeef454efec" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.008510] env[69927]: DEBUG nova.network.neutron [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.094366] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095644, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.249016] env[69927]: DEBUG oslo_concurrency.lockutils [None req-365e6496-1356-4369-962b-d7213220e718 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.953s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.358670] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.550508] env[69927]: DEBUG nova.network.neutron [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.591796] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095644, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.651678] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d5e336-9fc8-4b0c-9de6-db53a4fb56e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.660326] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0540d50-b120-4aec-8739-e1801573f5bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.703972] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945eeba7-6c74-4243-8f32-7c7ab3d3e931 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.713319] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47add05d-4eb2-4db6-99ee-8d5ad959d099 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.729615] env[69927]: DEBUG nova.compute.provider_tree [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.756227] env[69927]: DEBUG nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.762745] env[69927]: DEBUG nova.network.neutron [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Updating instance_info_cache with network_info: [{"id": "9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97", "address": "fa:16:3e:df:d2:98", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fe1cc7b-35", "ovs_interfaceid": "9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.881305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.094054] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095644, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.241080] env[69927]: DEBUG nova.scheduler.client.report [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 759.266292] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-9d83dda3-5fb1-416d-9307-faeef454efec" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.266941] env[69927]: DEBUG nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Instance network_info: |[{"id": "9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97", "address": "fa:16:3e:df:d2:98", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fe1cc7b-35", "ovs_interfaceid": "9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 759.267747] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:d2:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.276424] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 
tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating folder: Project (61b1aea0ccf049c8942ba32932412497). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.277419] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-934309ad-eb26-408b-99cc-68ca344e17f2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.283737] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.290020] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created folder: Project (61b1aea0ccf049c8942ba32932412497) in parent group-v811283. [ 759.290020] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating folder: Instances. Parent ref: group-v811371. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.290020] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0eb7f857-ebc4-49c7-86e8-6c098ce27897 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.300863] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created folder: Instances in parent group-v811371. [ 759.301473] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 759.302298] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 759.302601] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-136eda7e-00f0-41f1-8eab-51a8df5bc12a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.326049] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.326049] env[69927]: value = "task-4095647" [ 759.326049] env[69927]: _type = "Task" [ 759.326049] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.336166] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095647, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.596364] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095644, 'name': CloneVM_Task, 'duration_secs': 1.532005} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.596666] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Created linked-clone VM from snapshot [ 759.598330] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43400549-b518-4fac-9baf-ed638a225a1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.609442] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Uploading image 00f6ea29-ebc1-44c9-85c4-33a5df1836df {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 759.638479] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 759.638479] env[69927]: value = "vm-811370" [ 759.638479] env[69927]: _type = "VirtualMachine" [ 759.638479] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 759.638903] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-13286437-6390-4eaa-9870-ee57e72e7e17 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.650735] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lease: (returnval){ [ 759.650735] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b79295-5f3c-dc4f-8b84-be57f0e5fe68" [ 759.650735] env[69927]: _type = "HttpNfcLease" [ 759.650735] env[69927]: } obtained for exporting VM: (result){ [ 759.650735] env[69927]: value = "vm-811370" [ 759.650735] env[69927]: _type = "VirtualMachine" [ 759.650735] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 759.652165] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the lease: (returnval){ [ 759.652165] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b79295-5f3c-dc4f-8b84-be57f0e5fe68" [ 759.652165] env[69927]: _type = "HttpNfcLease" [ 759.652165] env[69927]: } to be ready. 
{{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 759.660902] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 759.660902] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b79295-5f3c-dc4f-8b84-be57f0e5fe68" [ 759.660902] env[69927]: _type = "HttpNfcLease" [ 759.660902] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 759.747074] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.747074] env[69927]: DEBUG nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 759.748979] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.665s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.750530] env[69927]: INFO nova.compute.claims [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.846030] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095647, 'name': CreateVM_Task, 'duration_secs': 0.408814} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.847114] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 759.847319] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.847628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.848086] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 759.848487] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05ec61af-51a3-4402-9a5e-048555360566 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.854958] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 759.854958] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520d472e-5e51-4b98-75b0-487e4a06cbb4" [ 759.854958] env[69927]: _type = "Task" [ 759.854958] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.863997] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520d472e-5e51-4b98-75b0-487e4a06cbb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.161666] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 760.161666] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b79295-5f3c-dc4f-8b84-be57f0e5fe68" [ 760.161666] env[69927]: _type = "HttpNfcLease" [ 760.161666] env[69927]: } is ready. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 760.161666] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 760.161666] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b79295-5f3c-dc4f-8b84-be57f0e5fe68" [ 760.161666] env[69927]: _type = "HttpNfcLease" [ 760.161666] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 760.162996] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a432dbe8-5daa-400f-a95d-b83b36c2abbe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.171844] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d75648-a327-9f35-1310-7a2cb5c89e0b/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 760.172050] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d75648-a327-9f35-1310-7a2cb5c89e0b/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 760.255787] env[69927]: DEBUG nova.compute.utils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 760.267539] env[69927]: DEBUG nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 760.267741] env[69927]: DEBUG nova.network.neutron [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 760.299877] env[69927]: DEBUG nova.compute.manager [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Received event network-changed-9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 760.299877] env[69927]: DEBUG nova.compute.manager [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Refreshing instance network info cache due to event network-changed-9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 760.299877] env[69927]: DEBUG oslo_concurrency.lockutils [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] Acquiring lock "refresh_cache-9d83dda3-5fb1-416d-9307-faeef454efec" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.299877] env[69927]: DEBUG oslo_concurrency.lockutils [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] Acquired lock "refresh_cache-9d83dda3-5fb1-416d-9307-faeef454efec" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.299877] env[69927]: DEBUG nova.network.neutron [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Refreshing network info cache for port 9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.310325] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f04376f7-499a-4407-87ea-7f81a48b82d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.345635] env[69927]: DEBUG nova.policy [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5187d174055c4ccaa5c805ac2cad225f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81b92defba9241bca7e1db3e91030712', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 760.366526] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520d472e-5e51-4b98-75b0-487e4a06cbb4, 'name': SearchDatastore_Task, 
'duration_secs': 0.010239} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.366853] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.367157] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 760.367441] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.367633] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.367829] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 760.368167] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81b0401e-82c3-4da5-8516-23b9926f9c32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.379147] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 760.379350] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 760.380113] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f316ac9-cde4-4ee6-b2c8-1a9c44d58d1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.387016] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 760.387016] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5267cf8f-3ec3-937c-1dd4-76a64ce62777" [ 760.387016] env[69927]: _type = "Task" [ 760.387016] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.400509] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5267cf8f-3ec3-937c-1dd4-76a64ce62777, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.708900] env[69927]: DEBUG nova.network.neutron [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Successfully created port: 8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.767167] env[69927]: DEBUG nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 760.902747] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5267cf8f-3ec3-937c-1dd4-76a64ce62777, 'name': SearchDatastore_Task, 'duration_secs': 0.020527} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.903765] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e01f597-dcab-491a-aa69-d88db57d59f1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.912338] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 760.912338] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52279ea9-9751-4de6-d16d-989225bea721" [ 760.912338] env[69927]: _type = "Task" [ 760.912338] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.927930] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52279ea9-9751-4de6-d16d-989225bea721, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.190298] env[69927]: DEBUG nova.network.neutron [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Updated VIF entry in instance network info cache for port 9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 761.190642] env[69927]: DEBUG nova.network.neutron [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Updating instance_info_cache with network_info: [{"id": "9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97", "address": "fa:16:3e:df:d2:98", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fe1cc7b-35", "ovs_interfaceid": "9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.433477] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52279ea9-9751-4de6-d16d-989225bea721, 'name': SearchDatastore_Task, 'duration_secs': 0.021725} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.435816] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.435816] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 761.435970] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f81d6e0-b2f4-41da-a4ba-f85692583a1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.448919] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 761.448919] env[69927]: value = "task-4095649" [ 761.448919] env[69927]: _type = "Task" [ 761.448919] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.463191] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095649, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.477246] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddc0331-d238-4460-a98c-c3d813f83015 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.486529] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9a08b1-d3b3-4100-aec8-ca18b1617cc4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.527154] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac7bab6-ba65-4961-b568-ed299a74aae8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.536927] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ec7ae2-d339-48af-a28f-0caba3e7e129 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.555349] env[69927]: DEBUG nova.compute.provider_tree [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.694235] env[69927]: DEBUG oslo_concurrency.lockutils [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] Releasing lock "refresh_cache-9d83dda3-5fb1-416d-9307-faeef454efec" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.694643] env[69927]: DEBUG nova.compute.manager [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Received event network-changed-2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 761.694924] env[69927]: DEBUG nova.compute.manager [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Refreshing instance network info cache due to event network-changed-2d989e8c-d768-494a-a866-4da8ff809d05. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 761.695254] env[69927]: DEBUG oslo_concurrency.lockutils [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] Acquiring lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.695620] env[69927]: DEBUG oslo_concurrency.lockutils [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] Acquired lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.695620] env[69927]: DEBUG nova.network.neutron [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Refreshing network info cache for port 2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.782858] env[69927]: DEBUG nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 761.810754] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 761.813021] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.813021] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 761.813021] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.813021] env[69927]: DEBUG nova.virt.hardware 
[None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 761.813021] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 761.813209] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 761.813209] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 761.813209] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 761.813209] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 761.813209] env[69927]: DEBUG nova.virt.hardware [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 761.814428] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb023b6b-1f3e-4549-a615-63610af5df0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.829231] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b366b4-f0a1-40dd-9c0a-46b834597e3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.965863] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095649, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.059988] env[69927]: DEBUG nova.scheduler.client.report [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.461919] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.785399} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.462284] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 762.462844] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 762.462990] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5103242-67a6-44ce-902a-17b16fe6615f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.470918] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 762.470918] env[69927]: value = "task-4095650" [ 762.470918] env[69927]: _type = "Task" [ 762.470918] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.483701] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095650, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.567967] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.819s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.568532] env[69927]: DEBUG nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 762.571621] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 25.123s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.670952] env[69927]: DEBUG nova.compute.manager [req-5b78b66b-ad78-446e-85f9-d04849531be6 req-ada072ff-e3a2-440d-84a9-33f4ef036d38 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Received event network-vif-plugged-8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 762.670952] env[69927]: DEBUG oslo_concurrency.lockutils [req-5b78b66b-ad78-446e-85f9-d04849531be6 req-ada072ff-e3a2-440d-84a9-33f4ef036d38 service nova] Acquiring lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.670952] env[69927]: DEBUG oslo_concurrency.lockutils [req-5b78b66b-ad78-446e-85f9-d04849531be6 req-ada072ff-e3a2-440d-84a9-33f4ef036d38 service nova] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.670952] env[69927]: DEBUG oslo_concurrency.lockutils [req-5b78b66b-ad78-446e-85f9-d04849531be6 req-ada072ff-e3a2-440d-84a9-33f4ef036d38 service nova] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.670952] env[69927]: DEBUG nova.compute.manager [req-5b78b66b-ad78-446e-85f9-d04849531be6 req-ada072ff-e3a2-440d-84a9-33f4ef036d38 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] No waiting events found dispatching network-vif-plugged-8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 762.671140] env[69927]: WARNING nova.compute.manager [req-5b78b66b-ad78-446e-85f9-d04849531be6 req-ada072ff-e3a2-440d-84a9-33f4ef036d38 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Received unexpected event network-vif-plugged-8c604e20-6cfc-4498-a689-d24876c07513 for 
instance with vm_state building and task_state spawning. [ 762.755742] env[69927]: DEBUG nova.network.neutron [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updated VIF entry in instance network info cache for port 2d989e8c-d768-494a-a866-4da8ff809d05. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 762.756111] env[69927]: DEBUG nova.network.neutron [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating instance_info_cache with network_info: [{"id": "2d989e8c-d768-494a-a866-4da8ff809d05", "address": "fa:16:3e:02:b9:e7", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d989e8c-d7", "ovs_interfaceid": "2d989e8c-d768-494a-a866-4da8ff809d05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.830372] env[69927]: DEBUG nova.network.neutron [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Successfully updated port: 8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.988730] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073742} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.988730] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 762.988730] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a34d260-a81f-4b83-a050-034f34a793ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.022556] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 763.022933] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b097a16-d406-472f-b75a-fe6d6bdf319e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.053299] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 763.053299] env[69927]: value = "task-4095651" [ 763.053299] env[69927]: _type = "Task" [ 763.053299] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.065807] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095651, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.076313] env[69927]: DEBUG nova.compute.utils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 763.079257] env[69927]: DEBUG nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Not allocating networking since 'none' was specified. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 763.260670] env[69927]: DEBUG oslo_concurrency.lockutils [req-18817e23-e4f6-46aa-aa19-f14f802bcb38 req-375a0d7f-57e1-4b3b-91fc-c30cd4bd7996 service nova] Releasing lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.339206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.339394] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.339394] env[69927]: DEBUG nova.network.neutron [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.568150] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095651, 'name': ReconfigVM_Task} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.568849] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 763.569918] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e0e8f92-fde6-4aed-aa4f-c1f93aa2505f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.580407] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 763.580407] env[69927]: value = "task-4095652" [ 763.580407] env[69927]: _type = "Task" [ 763.580407] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.585026] env[69927]: DEBUG nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 763.595968] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095652, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.784829] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bc52fa-a723-4ef9-bf35-859b6efd5716 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.795909] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab111229-87fb-4a7b-899c-39dfeb4e0166 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.858197] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454ab79d-b1f1-4828-b613-7a49e4b402e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.868553] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6482289e-cfd8-4783-a738-f2beded053e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.890688] env[69927]: DEBUG nova.compute.provider_tree [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.915149] env[69927]: DEBUG nova.network.neutron [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.102537] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095652, 'name': Rename_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.167272] env[69927]: DEBUG nova.network.neutron [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Updating instance_info_cache with network_info: [{"id": "8c604e20-6cfc-4498-a689-d24876c07513", "address": "fa:16:3e:13:7e:45", "network": {"id": "c75059d6-4484-4557-9698-a7b62151aeda", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1147445131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81b92defba9241bca7e1db3e91030712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c604e20-6c", "ovs_interfaceid": "8c604e20-6cfc-4498-a689-d24876c07513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.394264] env[69927]: DEBUG nova.scheduler.client.report [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.595566] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095652, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.602896] env[69927]: DEBUG nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 764.671767] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Releasing lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.672193] env[69927]: DEBUG nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Instance network_info: |[{"id": "8c604e20-6cfc-4498-a689-d24876c07513", "address": "fa:16:3e:13:7e:45", "network": {"id": "c75059d6-4484-4557-9698-a7b62151aeda", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1147445131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81b92defba9241bca7e1db3e91030712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c604e20-6c", "ovs_interfaceid": "8c604e20-6cfc-4498-a689-d24876c07513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 764.672796] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:7e:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c604e20-6cfc-4498-a689-d24876c07513', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.680567] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Creating folder: Project (81b92defba9241bca7e1db3e91030712). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.681323] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46192782-a7df-42a4-916e-f17989a42ce6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.696387] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Created folder: Project (81b92defba9241bca7e1db3e91030712) in parent group-v811283. [ 764.696579] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Creating folder: Instances. Parent ref: group-v811374. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.696831] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-236e1bf0-37a2-4a37-ad61-baa8a99c3803 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.731486] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Created folder: Instances in parent group-v811374. [ 764.731486] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.731486] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.731486] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-034ad87d-d463-4738-927b-4bd7206ee3d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.764096] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.764096] env[69927]: value = "task-4095655" [ 764.764096] env[69927]: _type = "Task" [ 764.764096] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.767416] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.767777] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.767873] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.767995] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.768153] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 764.768299] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.768509] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.768794] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.768850] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 
tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.769193] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.769193] env[69927]: DEBUG nova.virt.hardware [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.771070] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7438dd73-450c-4370-b3f2-15282a68e718 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.792160] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095655, 'name': CreateVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.798275] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95362de4-55c8-4d0f-b192-1dc8ddc99c27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.813731] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.820117] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.820117] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.820378] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c03401bc-cd20-45f9-8c07-d868563a2860 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.842357] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.842357] env[69927]: value = "task-4095656" [ 764.842357] env[69927]: _type = "Task" [ 764.842357] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.853149] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095656, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.093929] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095652, 'name': Rename_Task, 'duration_secs': 1.228222} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.094315] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.094574] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4de7c88d-3899-48e6-b227-4857fe0c40d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.102397] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 765.102397] env[69927]: value = "task-4095657" [ 765.102397] env[69927]: _type = "Task" [ 765.102397] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.111599] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095657, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.281134] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095655, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.302970] env[69927]: DEBUG nova.compute.manager [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Received event network-changed-8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.303209] env[69927]: DEBUG nova.compute.manager [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Refreshing instance network info cache due to event network-changed-8c604e20-6cfc-4498-a689-d24876c07513. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 765.303432] env[69927]: DEBUG oslo_concurrency.lockutils [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] Acquiring lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.303996] env[69927]: DEBUG oslo_concurrency.lockutils [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] Acquired lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.304330] env[69927]: DEBUG nova.network.neutron [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Refreshing network info cache for port 8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 765.354581] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095656, 'name': CreateVM_Task, 'duration_secs': 0.369828} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.355146] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 765.355902] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.356359] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.356808] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 765.357179] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3408eba1-fb74-4667-a1c6-d3d7863529dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.365292] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 765.365292] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521dba3e-5864-88c5-11dc-30318a1b9487" [ 765.365292] env[69927]: _type = "Task" [ 765.365292] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.377962] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521dba3e-5864-88c5-11dc-30318a1b9487, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.409362] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.837s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.412904] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 26.851s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.413121] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.413282] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 765.413980] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.314s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.416262] env[69927]: INFO nova.compute.claims [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.420101] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58380019-8590-4ab5-afe0-17dfe770de1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.438029] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58141637-0377-4bb8-8261-4f971b185bc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.453594] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802f9802-ba59-4d39-aa8b-6b1d5b81cd97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.462315] 
env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c0b0f9-95ee-44ad-8780-13e871389f50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.498338] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179621MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 765.498559] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.614785] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095657, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.782926] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095655, 'name': CreateVM_Task, 'duration_secs': 0.846812} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.783211] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 765.783952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.784155] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.784601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 765.784740] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb1bfbe9-9937-4d67-98dc-5e571eafa018 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.792011] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 
765.792011] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524ba337-0229-f333-985a-0d0d7a54d742" [ 765.792011] env[69927]: _type = "Task" [ 765.792011] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.800448] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524ba337-0229-f333-985a-0d0d7a54d742, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.878190] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521dba3e-5864-88c5-11dc-30318a1b9487, 'name': SearchDatastore_Task, 'duration_secs': 0.019523} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.878521] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.878835] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.879162] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.879318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.879513] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.879792] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a31c82c4-8a33-4c44-8004-fd03b96c0d82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.891153] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.891369] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.892099] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-241308ad-beba-4ccf-ac64-abeb45a4f8d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.898380] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 765.898380] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522ccb3b-3110-d583-34aa-f66353f72f7e" [ 765.898380] env[69927]: _type = "Task" [ 765.898380] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.907278] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522ccb3b-3110-d583-34aa-f66353f72f7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.010569] env[69927]: INFO nova.scheduler.client.report [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Deleted allocation for migration 997e4693-06f5-485a-a436-c0324b8f738d [ 766.116152] env[69927]: DEBUG oslo_vmware.api [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095657, 'name': PowerOnVM_Task, 'duration_secs': 0.619511} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.116478] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 766.116687] env[69927]: INFO nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Took 9.34 seconds to spawn the instance on the hypervisor. 
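[editor's note] The spawn path traced above follows one recurring pattern: each oslo.vmware call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task) returns a task reference, and wait_for_task/_poll_task then logs "progress is N%" until the task reports "completed successfully" with a duration_secs. The sketch below is a minimal, hypothetical illustration of that poll-until-done loop, not the oslo.vmware implementation; the TaskInfo shape, fetch_task_info callable and 0.5 s interval are assumptions made only for the example.

# Illustrative sketch only: a simplified poll-until-done loop in the spirit of the
# wait_for_task/_poll_task entries in this log. TaskInfo, fetch_task_info() and the
# poll interval are assumptions for the example, not oslo.vmware APIs.
import time
from dataclasses import dataclass
from typing import Callable

@dataclass
class TaskInfo:
    task_id: str           # e.g. "task-4095652"
    name: str              # e.g. "Rename_Task"
    state: str             # "running" | "success" | "error"
    progress: int          # 0-100
    error: str | None = None

def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  interval: float = 0.5) -> TaskInfo:
    """Poll a task until it finishes, logging progress the way _poll_task does."""
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(
                f"Task {info.task_id} ({info.name}) failed: {info.error}")
        print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
              f"progress is {info.progress}%.")
        time.sleep(interval)

In the log, the same loop accounts for the repeated "progress is N%" lines followed by a single completion line per task id (for example task-4095652 finishing with duration_secs 1.228222). [end editor's note]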
[ 766.116870] env[69927]: DEBUG nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 766.117711] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81064e4-ca16-4950-bc91-5e397e249d1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.209700] env[69927]: DEBUG nova.network.neutron [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Updated VIF entry in instance network info cache for port 8c604e20-6cfc-4498-a689-d24876c07513. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 766.209700] env[69927]: DEBUG nova.network.neutron [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Updating instance_info_cache with network_info: [{"id": "8c604e20-6cfc-4498-a689-d24876c07513", "address": "fa:16:3e:13:7e:45", "network": {"id": "c75059d6-4484-4557-9698-a7b62151aeda", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1147445131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81b92defba9241bca7e1db3e91030712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c604e20-6c", "ovs_interfaceid": "8c604e20-6cfc-4498-a689-d24876c07513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.302591] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524ba337-0229-f333-985a-0d0d7a54d742, 'name': SearchDatastore_Task, 'duration_secs': 0.025082} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.302998] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.303274] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.303411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.303556] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.303767] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.304084] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3186a5f-ee8f-4b10-9f54-f2f505a986b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.315743] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.315999] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 766.316886] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3273313c-df9a-4739-81ff-8d0923c764eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.325170] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 766.325170] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b4d509-4c11-19c5-0ca5-1ab5793386e4" [ 766.325170] env[69927]: _type = "Task" [ 766.325170] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.336660] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b4d509-4c11-19c5-0ca5-1ab5793386e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.411667] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522ccb3b-3110-d583-34aa-f66353f72f7e, 'name': SearchDatastore_Task, 'duration_secs': 0.03099} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.412491] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b47c4e79-2907-4fb3-9caf-d11051c46bd9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.419253] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 766.419253] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bf5c09-aa5d-1fce-ebd2-ca1cd186a203" [ 766.419253] env[69927]: _type = "Task" [ 766.419253] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.432957] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bf5c09-aa5d-1fce-ebd2-ca1cd186a203, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.527633] env[69927]: DEBUG oslo_concurrency.lockutils [None req-42cf5530-a9a6-4592-8f5d-a2c041dffc3e tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 32.061s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.636818] env[69927]: INFO nova.compute.manager [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Took 44.33 seconds to build instance. [ 766.712183] env[69927]: DEBUG oslo_concurrency.lockutils [req-14ba596f-1ebe-44f4-87e9-e1ff61453c1a req-c3ca3f92-4f03-4e8f-9a9a-05d2c4f38aba service nova] Releasing lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.841686] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b4d509-4c11-19c5-0ca5-1ab5793386e4, 'name': SearchDatastore_Task, 'duration_secs': 0.038622} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.848306] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31c19ed2-7ec9-4cc2-99da-d474d3813298 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.853578] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 766.853578] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bb5b28-f321-254b-bb51-3b2d7bc10cd9" [ 766.853578] env[69927]: _type = "Task" [ 766.853578] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.869830] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bb5b28-f321-254b-bb51-3b2d7bc10cd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.939510] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bf5c09-aa5d-1fce-ebd2-ca1cd186a203, 'name': SearchDatastore_Task, 'duration_secs': 0.032571} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.939778] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.940049] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.940324] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f99a085-0a94-4c0b-bd06-278d3fda1b7a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.019859] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 767.019859] env[69927]: value = "task-4095658" [ 767.019859] env[69927]: _type = "Task" [ 767.019859] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.032377] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095658, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.148543] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9f2a7c47-7686-4cca-a7b6-59647d333396 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9d83dda3-5fb1-416d-9307-faeef454efec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.958s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.194908] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba08d38-42ac-4a2d-8170-bc706e2bfd99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.207689] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6418f267-8152-499a-bb28-4fcc0dee7161 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.240501] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3080fb-d275-4f64-b979-0d32243e2dbb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.251175] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b520319a-0783-454a-97c4-45aed3c48f20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.268176] env[69927]: DEBUG nova.compute.provider_tree [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.369484] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bb5b28-f321-254b-bb51-3b2d7bc10cd9, 'name': SearchDatastore_Task, 'duration_secs': 0.028096} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.369948] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.370439] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 95c02aa2-d587-4c9f-9b02-2992dfe5b1be/95c02aa2-d587-4c9f-9b02-2992dfe5b1be.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 767.370811] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dcada79-4164-4141-bf0c-8daeb9b231d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.384139] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 767.384139] env[69927]: value = "task-4095659" [ 767.384139] env[69927]: _type = "Task" [ 767.384139] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.401266] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095659, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.533748] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095658, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.659136] env[69927]: DEBUG nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 767.771545] env[69927]: DEBUG nova.scheduler.client.report [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.899286] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095659, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.038647] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742581} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.039358] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.039870] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.040321] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0d70429-5bdb-4fb2-ae1a-e62d2c450b37 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.054562] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 768.054562] env[69927]: value = "task-4095660" [ 768.054562] env[69927]: _type = "Task" [ 768.054562] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.066851] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095660, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.189809] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.280343] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.866s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.280343] env[69927]: DEBUG nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 768.283694] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.115s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.283837] env[69927]: DEBUG nova.objects.instance [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'resources' on Instance uuid 39421358-2d66-4fbe-a4e0-8fdb0b420c5e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 768.402823] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095659, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.913521} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.402948] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 95c02aa2-d587-4c9f-9b02-2992dfe5b1be/95c02aa2-d587-4c9f-9b02-2992dfe5b1be.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.403228] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.403517] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc5371c6-a7b7-48bc-9750-9159959f7a1c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.413095] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 768.413095] env[69927]: value = "task-4095661" [ 768.413095] env[69927]: _type = "Task" [ 768.413095] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.424944] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095661, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.566476] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.222553} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.566808] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 768.567979] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d6379d-1365-48b1-82dd-67a5c4871d98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.590823] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.592018] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed876966-18fc-4288-a677-d27080903a08 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.613978] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 768.613978] env[69927]: value = "task-4095662" [ 768.613978] env[69927]: _type = "Task" [ 768.613978] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.623917] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095662, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.790829] env[69927]: DEBUG nova.compute.utils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.792603] env[69927]: DEBUG nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 768.933059] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095661, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083756} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.937127] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 768.938300] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070e9368-6655-4ecb-bc7e-cbe48f11fc75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.965686] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 95c02aa2-d587-4c9f-9b02-2992dfe5b1be/95c02aa2-d587-4c9f-9b02-2992dfe5b1be.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.970291] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c0f826c-f9f3-4485-a98e-91ff688953ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.995190] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 768.995190] env[69927]: value = "task-4095663" [ 768.995190] env[69927]: _type = "Task" [ 768.995190] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.005586] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095663, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.126435] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095662, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.280013] env[69927]: INFO nova.compute.manager [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Rebuilding instance [ 769.295015] env[69927]: DEBUG nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 769.347371] env[69927]: DEBUG nova.compute.manager [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 769.350032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a64dd0-dcdd-4cae-9a94-10d413af2201 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.452294] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c46baf-bebd-4eac-bb8e-d2bdd719ba1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.461277] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4de95af-c07f-4f3a-a969-f38651423016 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.501409] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0e8e02-8a73-4cdb-8d46-49f311447ad9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.512964] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d208fa-5f73-4172-8d2c-9b6c01234f7f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.517257] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095663, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.528944] env[69927]: DEBUG nova.compute.provider_tree [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.629708] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095662, 'name': ReconfigVM_Task, 'duration_secs': 0.517284} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.630716] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Reconfigured VM instance instance-0000001e to attach disk [datastore2] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.631379] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f94b06c-7ee8-4978-bc7c-2250bf848ee3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.639372] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 769.639372] env[69927]: value = "task-4095664" [ 769.639372] env[69927]: _type = "Task" [ 769.639372] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.650546] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095664, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.008468] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095663, 'name': ReconfigVM_Task, 'duration_secs': 0.638562} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.009265] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 95c02aa2-d587-4c9f-9b02-2992dfe5b1be/95c02aa2-d587-4c9f-9b02-2992dfe5b1be.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.009934] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f60fc1e7-36a0-4d93-a1cf-5a14a3055f35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.017274] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 770.017274] env[69927]: value = "task-4095665" [ 770.017274] env[69927]: _type = "Task" [ 770.017274] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.028584] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095665, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.032672] env[69927]: DEBUG nova.scheduler.client.report [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 770.113478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.113776] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.114037] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.114267] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.114462] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.116880] env[69927]: INFO 
nova.compute.manager [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Terminating instance [ 770.150068] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095664, 'name': Rename_Task, 'duration_secs': 0.204828} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.150365] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.150618] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de6a3817-57b9-4d1d-be28-f519ebaf7fb1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.157598] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 770.157598] env[69927]: value = "task-4095666" [ 770.157598] env[69927]: _type = "Task" [ 770.157598] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.167383] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095666, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.367931] env[69927]: DEBUG nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 770.374452] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 770.374804] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f68b2015-f56e-4a6b-9c27-c0d809c93c53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.386299] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 770.386299] env[69927]: value = "task-4095667" [ 770.386299] env[69927]: _type = "Task" [ 770.386299] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.395759] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.415036] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 770.415036] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.415036] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 770.415036] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.415348] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 770.415348] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 770.415348] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 770.415348] env[69927]: DEBUG 
nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 770.415348] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 770.415495] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 770.415495] env[69927]: DEBUG nova.virt.hardware [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 770.415495] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09826eea-d6ff-4494-9856-d76cc12773bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.425152] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e9e469-bed0-4bff-a7e2-86162b7a351b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.444687] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 770.453891] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Creating folder: Project (258ea84abbf54ee69d937621c3c345d8). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 770.454396] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4941f614-2558-48fa-aca0-bbc5b33821bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.469316] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Created folder: Project (258ea84abbf54ee69d937621c3c345d8) in parent group-v811283. [ 770.469547] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Creating folder: Instances. Parent ref: group-v811378. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 770.469843] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-606fff29-5554-4d09-b593-ad88946d14ce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.485034] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Created folder: Instances in parent group-v811378. [ 770.485034] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 770.485034] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 770.485034] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ccb56f6-c772-4ce9-ba04-ce7ee1ff93c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.505421] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 770.505421] env[69927]: value = "task-4095670" [ 770.505421] env[69927]: _type = "Task" [ 770.505421] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.515481] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095670, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.531026] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095665, 'name': Rename_Task, 'duration_secs': 0.155608} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.531026] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.531026] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ba2f57f-cbc3-4663-8e34-f3b738a47c50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.538799] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.254s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.542246] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.333s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.542246] env[69927]: DEBUG nova.objects.instance [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lazy-loading 'resources' on Instance uuid 2cdfda66-1d93-4960-a129-2788f10fa593 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 770.543898] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 770.543898] env[69927]: value = "task-4095671" [ 770.543898] env[69927]: _type = "Task" [ 770.543898] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.555621] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.580300] env[69927]: INFO nova.scheduler.client.report [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted allocations for instance 39421358-2d66-4fbe-a4e0-8fdb0b420c5e [ 770.621499] env[69927]: DEBUG nova.compute.manager [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 770.622164] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 770.623344] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40083a4d-0bf9-45bc-93b3-220d4930c71b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.635029] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 770.635548] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ec8fe52-0e84-461e-b044-fa32d8c89a19 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.644189] env[69927]: DEBUG oslo_vmware.api [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 770.644189] env[69927]: value = "task-4095672" [ 770.644189] env[69927]: _type = "Task" [ 770.644189] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.658521] env[69927]: DEBUG oslo_vmware.api [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.669576] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095666, 'name': PowerOnVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.773352] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d75648-a327-9f35-1310-7a2cb5c89e0b/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 770.774940] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415ea43b-7ae5-4504-8795-b72d91248233 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.789111] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d75648-a327-9f35-1310-7a2cb5c89e0b/disk-0.vmdk is in state: ready. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 770.789111] env[69927]: ERROR oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d75648-a327-9f35-1310-7a2cb5c89e0b/disk-0.vmdk due to incomplete transfer. [ 770.789111] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-accf3a75-99c2-4113-8745-eb615c615d76 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.808852] env[69927]: DEBUG oslo_vmware.rw_handles [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d75648-a327-9f35-1310-7a2cb5c89e0b/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 770.808852] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Uploaded image 00f6ea29-ebc1-44c9-85c4-33a5df1836df to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 770.810703] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 770.811153] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-79a7a393-7221-4625-bc46-f14027938ddc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.820373] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 770.820373] env[69927]: value = "task-4095673" [ 770.820373] env[69927]: _type = "Task" [ 770.820373] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.831311] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095673, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.899027] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095667, 'name': PowerOffVM_Task, 'duration_secs': 0.254886} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.899027] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 770.899027] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 770.899027] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023f1ce3-f7eb-47b7-9326-c3cec7bd8059 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.906543] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 770.907095] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5627bd4c-0f1b-4236-a19a-095c72c268b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.984591] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 770.985029] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 770.985029] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleting the datastore file [datastore1] 9d83dda3-5fb1-416d-9307-faeef454efec {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 770.985286] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f12976b5-74fd-4e18-8869-f96479751786 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.999562] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 770.999562] env[69927]: value = "task-4095675" [ 770.999562] env[69927]: _type = "Task" [ 770.999562] env[69927]: } to complete. 
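The vmops records for instance 9d83dda3 follow the usual teardown order: power off, unregister the VM, then delete its datastore directory. The sketch below captures that ordering with best-effort error handling; the three step callables are placeholders for the corresponding vCenter calls, not Nova code.

# Teardown order shown in the log: power off -> unregister -> delete files.
# Each step is best-effort so a failure in one does not strand the others.
def destroy_instance(power_off, unregister, delete_datastore_dir, log=print):
    for name, step in (("power off", power_off),
                       ("unregister", unregister),
                       ("delete datastore files", delete_datastore_dir)):
        try:
            step()
            log(f"{name}: done")
        except Exception as exc:   # broad on purpose: keep tearing down
            log(f"{name}: failed ({exc}); continuing with teardown")


if __name__ == "__main__":
    def failing_delete():
        raise RuntimeError("datastore file busy")

    destroy_instance(lambda: None, lambda: None, failing_delete)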
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.017408] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.021023] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095670, 'name': CreateVM_Task, 'duration_secs': 0.354922} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.021810] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 771.021810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.021810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.022272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 771.022456] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25508097-9fa3-4eb6-81f8-c7acb5616af9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.028589] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 771.028589] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fe1172-81ea-13c9-7246-91ab83027787" [ 771.028589] env[69927]: _type = "Task" [ 771.028589] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.037944] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fe1172-81ea-13c9-7246-91ab83027787, 'name': SearchDatastore_Task} progress is 0%. 
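Before touching the shared image cache ([datastore2] devstack-image-cache_base/<image-id>), the worker above takes a lock named after the cache path so that concurrent spawns of the same image serialize. A minimal per-name lock helper in plain threading is sketched below; it stands in for oslo_concurrency.lockutils and ignores the cross-process ("external semaphore") case the log also mentions.

# Per-resource locking keyed by name, as used around the image-cache lookups.
# threading-only sketch; the real code uses oslo_concurrency.lockutils, which
# can also take file-based locks that work across processes.
import threading
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()


@contextmanager
def named_lock(name):
    with _locks_guard:                         # find-or-create the lock for this name
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}"')
    with lock:
        print(f'Lock "{name}" acquired')
        try:
            yield
        finally:
            print(f'Releasing lock "{name}"')


if __name__ == "__main__":
    cache_path = "[datastore2] devstack-image-cache_base/<image-id>"
    with named_lock(cache_path):
        pass  # search the datastore / copy the cached disk while serialized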
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.060438] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095671, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.097032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b83d762-4cb8-45ee-97f3-22503fb14b61 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "39421358-2d66-4fbe-a4e0-8fdb0b420c5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.567s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.157104] env[69927]: DEBUG oslo_vmware.api [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095672, 'name': PowerOffVM_Task, 'duration_secs': 0.202318} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.157228] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 771.157922] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 771.157922] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bbe2aba-b517-4c7c-95a4-52a2cb278ac6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.177445] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095666, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.246368] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 771.246601] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 771.246796] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Deleting the datastore file [datastore1] 6e698775-2556-4cbe-b65f-0cc3efa7bcf6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.247157] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-760cfdb0-1ca6-471d-90ef-572d7ab438f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.258834] env[69927]: DEBUG oslo_vmware.api [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for the task: (returnval){ [ 771.258834] env[69927]: value = "task-4095677" [ 771.258834] env[69927]: _type = "Task" [ 771.258834] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.278158] env[69927]: DEBUG oslo_vmware.api [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095677, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.332721] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095673, 'name': Destroy_Task, 'duration_secs': 0.436461} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.337961] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Destroyed the VM [ 771.339256] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 771.342554] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-17a8bfa5-c1c5-4eed-ba1f-85deaace9580 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.349990] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "c3e8a429-8484-4b11-abe3-1cccf0992556" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.350252] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.358857] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 771.358857] env[69927]: value = "task-4095678" [ 771.358857] env[69927]: _type = "Task" [ 771.358857] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.372932] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095678, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.513522] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206444} completed successfully. 
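The records for instance a536b069 show the tail of a snapshot-to-Glance flow: the exported disk is uploaded, the temporary export VM is destroyed, and the snapshot itself is removed. The sketch lists those phases in order; the callables are hypothetical placeholders for the vCenter and Glance calls, not the vmwareapi driver's functions.

# Phases visible in the log for creating an image from a running instance.
def snapshot_to_image(create_snapshot, export_and_upload, destroy_export_vm,
                      remove_snapshot, log=print):
    snap = create_snapshot()
    try:
        export_and_upload(snap)     # "Uploaded image ... to the Glance image server"
        log("upload finished")
    finally:
        # Clean-up runs even if the upload fails part-way through.
        destroy_export_vm()         # "Destroyed the VM"
        remove_snapshot(snap)       # "Deleted Snapshot of the VM instance"
        log("snapshot cleanup finished")


if __name__ == "__main__":
    snapshot_to_image(lambda: "snap-1",
                      lambda snap: None,
                      lambda: None,
                      lambda snap: None)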
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.513832] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 771.514050] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 771.514310] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 771.539931] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fe1172-81ea-13c9-7246-91ab83027787, 'name': SearchDatastore_Task, 'duration_secs': 0.013327} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.540313] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.540643] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.540896] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.541093] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.541328] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.542290] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-576c507f-6175-462e-a506-d87693fd770c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.564030] env[69927]: DEBUG oslo_vmware.api [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095671, 'name': PowerOnVM_Task, 'duration_secs': 0.694893} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.564030] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.564030] env[69927]: INFO nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Took 9.78 seconds to spawn the instance on the hypervisor. [ 771.564328] env[69927]: DEBUG nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 771.564463] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.564685] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.566209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624a97a1-b4de-4d7a-800a-77d31e1dc2a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.571192] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d37175e7-e165-42dc-8ec7-349382be9a22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.583894] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 771.583894] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5266ed96-e3b8-5304-81a7-2cbe66292f48" [ 771.583894] env[69927]: _type = "Task" [ 771.583894] env[69927]: } to complete. 
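The mkdir / _create_folder_if_missing records above create the cache directory unconditionally and treat "already exists" as success, so workers racing on the same path stay safe. Below is a local-filesystem analogue of that idempotent step; the real code issues FileManager.MakeDirectory against the datastore rather than os.makedirs.

# Idempotent "create folder if missing": creating a directory that already
# exists is not an error, mirroring _create_folder_if_missing in the log.
import os
import tempfile


def create_folder_if_missing(path):
    os.makedirs(path, exist_ok=True)      # no-op if another worker won the race
    print(f"Folder {path} created (or already present).")


if __name__ == "__main__":
    base = tempfile.mkdtemp()
    cache = os.path.join(base, "devstack-image-cache_base")
    create_folder_if_missing(cache)
    create_folder_if_missing(cache)       # second call is harmless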
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.606320] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5266ed96-e3b8-5304-81a7-2cbe66292f48, 'name': SearchDatastore_Task, 'duration_secs': 0.011419} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.606428] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27e04834-93fd-4926-ab7f-61dd59ce425b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.613880] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 771.613880] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520507b5-44d0-00da-9bcb-221cf480b0c8" [ 771.613880] env[69927]: _type = "Task" [ 771.613880] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.630668] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520507b5-44d0-00da-9bcb-221cf480b0c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.677080] env[69927]: DEBUG oslo_vmware.api [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095666, 'name': PowerOnVM_Task, 'duration_secs': 1.296124} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.677332] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.677536] env[69927]: INFO nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Took 7.08 seconds to spawn the instance on the hypervisor. 
[ 771.677716] env[69927]: DEBUG nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 771.681839] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3345eebe-4cb9-47ff-8376-d72e62981707 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.759785] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2a4141-7f6a-41a1-b0e6-e3050e9d0588 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.772515] env[69927]: DEBUG oslo_vmware.api [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Task: {'id': task-4095677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169568} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.774588] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 771.774790] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 771.774967] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 771.775160] env[69927]: INFO nova.compute.manager [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 771.775523] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 771.775798] env[69927]: DEBUG nova.compute.manager [-] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 771.775798] env[69927]: DEBUG nova.network.neutron [-] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 771.778205] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50d3f9a-b58e-4f1b-bf99-246814c419a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.812063] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addd55c9-c1ae-46bb-9610-a5ab70562e1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.821080] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df78422b-30f8-48de-a845-c2250c854437 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.840219] env[69927]: DEBUG nova.compute.provider_tree [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.880649] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095678, 'name': RemoveSnapshot_Task} progress is 50%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.105571] env[69927]: INFO nova.compute.manager [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Took 40.91 seconds to build instance. [ 772.128505] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520507b5-44d0-00da-9bcb-221cf480b0c8, 'name': SearchDatastore_Task, 'duration_secs': 0.015752} completed successfully. 
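Network teardown for instance 6e698775 is wrapped in a looping call that keeps invoking _deallocate_network_with_retries until it succeeds. A plain retry loop with a growing delay is sketched below; oslo.service's looping-call classes do this with more machinery (threads/greenthreads, jitter) that the sketch deliberately omits, and the flaky_deallocate function is a made-up example.

# Retry wrapper in the spirit of the "_deallocate_network_with_retries"
# looping call: keep calling until success or the attempts run out.
import time


def call_with_retries(func, attempts=5, initial_delay=0.1, log=print):
    delay = initial_delay
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception as exc:          # deliberate broad retry
            if attempt == attempts:
                raise
            log(f"attempt {attempt} failed ({exc}); retrying in {delay:.2f}s")
            time.sleep(delay)
            delay *= 2                    # simple exponential backoff


if __name__ == "__main__":
    calls = {"n": 0}

    def flaky_deallocate():
        calls["n"] += 1
        if calls["n"] < 3:
            raise RuntimeError("neutron temporarily unavailable")
        return "deallocated"

    print(call_with_retries(flaky_deallocate, initial_delay=0.01))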
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.128939] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.129315] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 772.129704] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f75dd37-a5e0-4b4f-9a64-d182f92fe8fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.141034] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 772.141034] env[69927]: value = "task-4095679" [ 772.141034] env[69927]: _type = "Task" [ 772.141034] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.152396] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.200187] env[69927]: INFO nova.compute.manager [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Took 36.14 seconds to build instance. [ 772.347301] env[69927]: DEBUG nova.scheduler.client.report [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 772.380108] env[69927]: DEBUG oslo_vmware.api [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095678, 'name': RemoveSnapshot_Task, 'duration_secs': 0.894675} completed successfully. 
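The scheduler report client logs "Inventory has not changed" after comparing the freshly built inventory dict (VCPU / MEMORY_MB / DISK_GB with totals, reserved, allocation_ratio, and so on) against what it last sent for that provider, and it skips the update when they match. The dictionary-equality sketch below models that decision only; update_inventory_if_changed and its push callback are invented names, not the report client API.

# "Inventory has not changed" check: only push to Placement when the freshly
# computed inventory differs from the cached copy for that provider.
def update_inventory_if_changed(cached, provider_id, new_inventory, push, log=print):
    if cached.get(provider_id) == new_inventory:
        log(f"Inventory has not changed for provider {provider_id}")
        return False
    push(provider_id, new_inventory)          # would be an update to the Placement API
    cached[provider_id] = new_inventory
    return True


if __name__ == "__main__":
    inv = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    cache = {"2f529b36": inv}
    update_inventory_if_changed(cache, "2f529b36", dict(inv), push=lambda *a: None)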
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.380108] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 772.380108] env[69927]: INFO nova.compute.manager [None req-fea60ecb-49bb-4543-8528-347d24380358 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Took 17.90 seconds to snapshot the instance on the hypervisor. [ 772.577092] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 772.577322] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 772.577450] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 772.577630] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 772.577775] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 772.577921] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 772.578349] env[69927]: DEBUG nova.virt.hardware [None 
req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 772.578555] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 772.578731] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 772.578896] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 772.579086] env[69927]: DEBUG nova.virt.hardware [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 772.581552] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56966993-7b3a-42eb-bc4c-b6bcfe99bdc3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.591191] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c91cf98-64f4-4f54-8e3f-a3d05dbb8dd0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.611817] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9d63c26c-4b2b-4a96-b4d1-fe3211629c7b tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.118s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.612664] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:d2:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.621766] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 
tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.623164] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.623865] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e70d6e8-0ad2-46ac-9983-0a3a5457e4a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.653246] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095679, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.655087] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.655087] env[69927]: value = "task-4095680" [ 772.655087] env[69927]: _type = "Task" [ 772.655087] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.669764] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095680, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.703464] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bf257ccc-f580-4b33-bc69-f5b0c9dfc8c9 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "bf4bee47-36ce-43ee-96f1-96f262882986" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.259s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.850693] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.309s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.853957] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.306s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.853957] env[69927]: DEBUG nova.objects.instance [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lazy-loading 'resources' on Instance uuid 128d0705-21a0-4103-ae84-85bbac7e718b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 772.872646] env[69927]: INFO nova.scheduler.client.report [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Deleted allocations for instance 
2cdfda66-1d93-4960-a129-2788f10fa593 [ 772.956023] env[69927]: DEBUG nova.network.neutron [-] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.073802] env[69927]: DEBUG nova.compute.manager [req-cfaad9ac-4663-4737-b043-a597c556a247 req-8f213975-2e96-456b-ba83-3359e5dbf284 service nova] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Received event network-vif-deleted-32ace01f-025d-4978-a510-c851c8daf246 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.093039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.093039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.145586] env[69927]: DEBUG nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.158431] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095679, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714718} completed successfully. 
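The nova.virt.hardware records a few entries above (flavor and image limits 0:0:0, preferred topology 0:0:0, a single possible topology 1:1:1 for a 1-vCPU flavor) come from the generic "enumerate the possible topologies, then sort by preference" step. A much-reduced sketch of that enumeration is below, treating a zero limit or preference as "unbounded / no preference"; it conveys the shape of the calculation, not the hardware.py algorithm itself.

# Reduced sketch: enumerate sockets*cores*threads factorizations of the vCPU
# count that fit the limits, then prefer candidates matching the request.
# For 1 vCPU the only candidate is 1:1:1, as in the log.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)


def sorted_topologies(vcpus, preferred=(0, 0, 0)):
    def score(topo):
        # Higher score for each non-zero preferred field that matches.
        return sum(1 for want, got in zip(preferred, topo) if want and want == got)
    return sorted(possible_topologies(vcpus), key=score, reverse=True)


if __name__ == "__main__":
    print(sorted_topologies(1))        # -> [(1, 1, 1)]
    print(sorted_topologies(4)[:3])    # several candidate factorizations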
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.162017] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.162017] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.162196] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f138baee-e411-484d-b5df-575b14cde2ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.171337] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095680, 'name': CreateVM_Task, 'duration_secs': 0.483419} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.172743] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.174204] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 773.174204] env[69927]: value = "task-4095681" [ 773.174204] env[69927]: _type = "Task" [ 773.174204] env[69927]: } to complete. 
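The c6a06550 records show the cached base VMDK being copied into the instance directory and the root disk then being extended to the flavor's root size (1 GiB for m1.nano). The local-file analogue below mirrors only the "copy, then grow if smaller than the flavor" decision; CopyVirtualDisk_Task and ExtendVirtualDisk_Task are vCenter tasks, not file operations, and prepare_root_disk is an invented helper.

# Copy-then-extend decision from the log: clone the cached base disk for the
# instance, then grow it if the flavor's root disk is larger than the image.
import os
import shutil
import tempfile


def prepare_root_disk(cached_disk, instance_disk, flavor_root_gb):
    shutil.copyfile(cached_disk, instance_disk)             # "Copied Virtual Disk"
    target_bytes = flavor_root_gb * 1024 ** 3
    if os.path.getsize(instance_disk) < target_bytes:
        with open(instance_disk, "r+b") as f:               # "Extending root virtual disk"
            f.truncate(target_bytes)
    return os.path.getsize(instance_disk)


if __name__ == "__main__":
    workdir = tempfile.mkdtemp()
    base = os.path.join(workdir, "base.vmdk")
    inst = os.path.join(workdir, "instance.vmdk")
    with open(base, "wb") as f:
        f.write(b"\0" * 1024)                               # stand-in for the cached image
    print(prepare_root_disk(base, inst, flavor_root_gb=1))  # -> 1073741824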
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.174204] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.174204] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.174486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 773.174669] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d5be84d-54bb-417e-bc63-b0c2ea2a1def {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.187088] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 773.187088] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fd954f-617c-8396-38f1-bab4426e860a" [ 773.187088] env[69927]: _type = "Task" [ 773.187088] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.190999] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095681, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.197684] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fd954f-617c-8396-38f1-bab4426e860a, 'name': SearchDatastore_Task, 'duration_secs': 0.010321} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.197997] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.198255] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.198495] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.198638] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.198822] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 773.199146] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0a559fc-c051-415f-aaa8-8cf88d4b3aad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.210459] env[69927]: DEBUG nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.213491] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 773.213698] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 773.214912] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-211948a0-2a5e-4c22-888c-ccddf035e2b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.223682] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 773.223682] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521041ea-8735-44e9-fb4c-2512724e0b09" [ 773.223682] env[69927]: _type = "Task" [ 773.223682] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.232797] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521041ea-8735-44e9-fb4c-2512724e0b09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.385174] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6879b171-1fb9-4fa7-8796-fb4e2d410e32 tempest-ImagesOneServerTestJSON-280648197 tempest-ImagesOneServerTestJSON-280648197-project-member] Lock "2cdfda66-1d93-4960-a129-2788f10fa593" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.813s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.459842] env[69927]: INFO nova.compute.manager [-] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Took 1.68 seconds to deallocate network for instance. [ 773.676702] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.689782] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095681, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06951} completed successfully. 
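The lockutils records consistently report both how long a caller waited for a lock and how long it was held (for example "waited 33.306s" and "held 37.813s" above), which is what makes contention on "compute_resources" and the per-instance locks visible in this log. A small timing wrapper that produces the same two figures is sketched below; timed_lock is an invented helper, not the lockutils API.

# Timing wrapper that reports lock wait and hold durations, like the
# 'waited N.NNNs' / 'held N.NNNs' figures in the lockutils records.
import threading
import time
from contextlib import contextmanager


@contextmanager
def timed_lock(lock, name, log=print):
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    log(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        log(f'Lock "{name}" released :: held {held:.3f}s')


if __name__ == "__main__":
    lk = threading.Lock()
    with timed_lock(lk, "compute_resources"):
        time.sleep(0.05)     # simulated critical section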
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.689782] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.690338] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a7313b-753d-4876-a258-d1cbcc407263 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.714412] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.717820] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14966cc2-13c8-4bf1-b082-d21ba62a7943 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.751320] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521041ea-8735-44e9-fb4c-2512724e0b09, 'name': SearchDatastore_Task, 'duration_secs': 0.011585} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.754317] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 773.754317] env[69927]: value = "task-4095682" [ 773.754317] env[69927]: _type = "Task" [ 773.754317] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.754317] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc9acbef-d6aa-41c1-b1eb-5bb296918503 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.762015] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.764636] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 773.764636] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5236fe7a-f5a4-4075-c833-bcf0dd663479" [ 773.764636] env[69927]: _type = "Task" [ 773.764636] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.773210] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095682, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.783207] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5236fe7a-f5a4-4075-c833-bcf0dd663479, 'name': SearchDatastore_Task, 'duration_secs': 0.011397} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.783207] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.783207] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 773.783207] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67736cae-a875-4276-97a0-023fd54eb6aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.792242] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 773.792242] env[69927]: value = "task-4095683" [ 773.792242] env[69927]: _type = "Task" [ 773.792242] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.802101] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095683, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.970988] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.067225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37303838-50fc-4225-8a3e-1d07e47a3e50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.080084] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3532c3-3b45-4a9f-b90c-181feceb1451 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.119638] env[69927]: INFO nova.compute.manager [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Rebuilding instance [ 774.124450] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395b7131-bc8a-455f-9d41-e137e2e192b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.140670] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8b60c9-51a7-4cf7-9f9f-721c48f822e4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.158045] env[69927]: DEBUG nova.compute.provider_tree [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.206957] env[69927]: DEBUG nova.compute.manager [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 774.208215] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7f1d6f-1852-44da-b868-ae27d9e087f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.268151] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095682, 'name': ReconfigVM_Task, 'duration_secs': 0.406111} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.268464] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Reconfigured VM instance instance-0000001f to attach disk [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.269136] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00a49bd9-5ac0-4e55-ac9d-73046271cbe0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.279627] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 774.279627] env[69927]: value = "task-4095684" [ 774.279627] env[69927]: _type = "Task" [ 774.279627] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.289408] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095684, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.302219] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509759} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.302540] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 774.302802] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 774.303113] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c465338-a4ed-4ff6-8f40-1e7b869cefe4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.311546] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 774.311546] env[69927]: value = "task-4095685" [ 774.311546] env[69927]: _type = "Task" [ 774.311546] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.321202] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095685, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.665153] env[69927]: DEBUG nova.scheduler.client.report [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.797570] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095684, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.824823] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.259378} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.825156] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.826375] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96feaab-3f1c-4cc6-b08b-daa8c91b004f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.855981] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.856480] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21461432-6baa-4db5-bc28-e64cb1c2e5f2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.880085] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 774.880085] env[69927]: value = "task-4095686" [ 774.880085] env[69927]: _type = "Task" [ 774.880085] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.890561] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095686, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.111970] env[69927]: DEBUG nova.compute.manager [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Received event network-changed-8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 775.112204] env[69927]: DEBUG nova.compute.manager [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Refreshing instance network info cache due to event network-changed-8c604e20-6cfc-4498-a689-d24876c07513. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 775.112445] env[69927]: DEBUG oslo_concurrency.lockutils [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] Acquiring lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.113071] env[69927]: DEBUG oslo_concurrency.lockutils [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] Acquired lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.113071] env[69927]: DEBUG nova.network.neutron [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Refreshing network info cache for port 8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 775.141296] env[69927]: DEBUG nova.compute.manager [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 775.141984] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd3ba00-19b3-450a-a5fa-c311d112ae07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.176292] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.323s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.180719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.098s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.182622] env[69927]: INFO nova.compute.claims [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.226366] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.226681] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea197c98-6dbc-43a4-b523-cefe32535398 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.231901] env[69927]: INFO 
nova.scheduler.client.report [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted allocations for instance 128d0705-21a0-4103-ae84-85bbac7e718b [ 775.238146] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 775.238146] env[69927]: value = "task-4095687" [ 775.238146] env[69927]: _type = "Task" [ 775.238146] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.253181] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095687, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.297105] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095684, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.390124] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095686, 'name': ReconfigVM_Task, 'duration_secs': 0.43311} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.390124] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 9d83dda3-5fb1-416d-9307-faeef454efec/9d83dda3-5fb1-416d-9307-faeef454efec.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.390899] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97273f6e-0a92-499c-a0de-3ec5df1a949c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.400919] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 775.400919] env[69927]: value = "task-4095688" [ 775.400919] env[69927]: _type = "Task" [ 775.400919] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.412114] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095688, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.658034] env[69927]: INFO nova.compute.manager [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] instance snapshotting [ 775.662028] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88cf93d-f466-48c7-8a7b-5ca7209721c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.692771] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40d6cc2-7f44-4419-b603-f43a84bd38eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.745450] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f6c4883-a10d-4dae-bd1f-63bf4816d4b2 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "128d0705-21a0-4103-ae84-85bbac7e718b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.220s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.756416] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095687, 'name': PowerOffVM_Task, 'duration_secs': 0.370503} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.759353] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 775.759611] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.760423] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9c556b-597a-4c55-875d-d1035ff8b75e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.768755] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.768994] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d35857a-e1b7-48a9-aacd-08320375d880 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.800224] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: 
bf4bee47-36ce-43ee-96f1-96f262882986] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.800377] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.800571] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleting the datastore file [datastore2] bf4bee47-36ce-43ee-96f1-96f262882986 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.804283] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bc01d52-03ca-4981-9eec-be3cc7d2e664 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.806330] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095684, 'name': Rename_Task, 'duration_secs': 1.182744} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.806851] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.807340] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3041467a-75c7-4132-b40c-623a83310937 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.812274] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 775.812274] env[69927]: value = "task-4095690" [ 775.812274] env[69927]: _type = "Task" [ 775.812274] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.817306] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 775.817306] env[69927]: value = "task-4095691" [ 775.817306] env[69927]: _type = "Task" [ 775.817306] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.823979] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095690, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.830125] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.909516] env[69927]: DEBUG nova.network.neutron [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Updated VIF entry in instance network info cache for port 8c604e20-6cfc-4498-a689-d24876c07513. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.909924] env[69927]: DEBUG nova.network.neutron [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Updating instance_info_cache with network_info: [{"id": "8c604e20-6cfc-4498-a689-d24876c07513", "address": "fa:16:3e:13:7e:45", "network": {"id": "c75059d6-4484-4557-9698-a7b62151aeda", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1147445131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81b92defba9241bca7e1db3e91030712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c604e20-6c", "ovs_interfaceid": "8c604e20-6cfc-4498-a689-d24876c07513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.915473] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095688, 'name': Rename_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.209485] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 776.209896] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-14ac2ac0-37f2-4698-b4fb-16d9d45e68ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.218305] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 776.218305] env[69927]: value = "task-4095692" [ 776.218305] env[69927]: _type = "Task" [ 776.218305] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.227349] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095692, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.330236] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095690, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119026} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.331095] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.331485] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 776.331718] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.342219] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095691, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.416959] env[69927]: DEBUG oslo_concurrency.lockutils [req-ec2af01e-11f1-4800-bb28-c6906f3ddb26 req-5f443b08-f737-44a6-9c21-5c8fd783e5a1 service nova] Releasing lock "refresh_cache-95c02aa2-d587-4c9f-9b02-2992dfe5b1be" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.416959] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095688, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.732713] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095692, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.796919] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09527fb-0f17-474d-b545-4141985b93b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.811308] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3cdefa-53c3-4138-bd70-5c71a643202c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.853065] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e0e4aa-8f38-4789-9eb3-0fb7093cac5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.861284] env[69927]: DEBUG oslo_vmware.api [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095691, 'name': PowerOnVM_Task, 'duration_secs': 0.603878} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.863529] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 776.863744] env[69927]: INFO nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Took 6.50 seconds to spawn the instance on the hypervisor. 
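Throughout this stretch of the log (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) the driver follows the same pattern: submit a vSphere task, then poll it, logging "progress is N%" until the task is reported as completed successfully with a duration_secs value. The snippet below is only a minimal, self-contained sketch of that poll-until-done shape, not the oslo.vmware implementation; fetch_task_info, TaskInfo, TaskFailed and the polling interval are names invented here for illustration.

```python
# Minimal sketch of the poll-until-done pattern seen in the task entries above.
# This is NOT the oslo.vmware code; fetch_task_info() stands in for whatever
# call would retrieve the current task state from vCenter.
import time
from dataclasses import dataclass
from typing import Callable


@dataclass
class TaskInfo:
    state: str            # "running", "success" or "error"
    progress: int         # 0-100, as logged ("progress is N%")
    error: str | None = None


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (illustrative only)."""


def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> None:
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        print(f"progress is {info.progress}%")   # mirrors the DEBUG polling lines
        if info.state == "success":
            print("completed successfully")
            return
        if info.state == "error":
            raise TaskFailed(info.error or "task failed")
        time.sleep(poll_interval)
    raise TaskFailed("timed out waiting for task")
```

Called with a stub that reports "success" on the first poll, this prints a single progress line followed by "completed successfully", which is the shape of the short-lived tasks above (e.g. DeleteDatastoreFile_Task finishing in roughly 0.12 s after one 0% progress report).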
[ 776.863928] env[69927]: DEBUG nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 776.864751] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc241023-9caf-4ed6-a5c0-96abcd2d4ee2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.868198] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ab26e1-177d-4e59-b606-46a338fb3f65 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.886386] env[69927]: DEBUG nova.compute.provider_tree [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.914644] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095688, 'name': Rename_Task, 'duration_secs': 1.026855} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.914926] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.915436] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0b35492-2610-4dc8-abbb-872df6dd75b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.922786] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 776.922786] env[69927]: value = "task-4095693" [ 776.922786] env[69927]: _type = "Task" [ 776.922786] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.932442] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.237388] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095692, 'name': CreateSnapshot_Task, 'duration_secs': 0.620296} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.240029] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 777.240029] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684f8d8d-6308-4d72-bade-5d2764936d6c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.385081] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 777.385415] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.385646] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.385880] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.386069] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.386251] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.386499] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.386833] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 777.386833] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.388946] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.388946] env[69927]: DEBUG nova.virt.hardware [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.388946] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14584bc7-6469-40b4-8d10-a691ccf11839 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.398021] env[69927]: DEBUG nova.scheduler.client.report [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.403564] env[69927]: INFO nova.compute.manager [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Took 38.34 seconds to build instance. 
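The "Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726" entries above carry per-resource-class records of total, reserved, min_unit/max_unit, step_size and allocation_ratio. As a rough illustration of how such a record is usually read (a hand-written sketch, not nova or placement code; usable_capacity is a name invented here), the schedulable amount of each class is the reserved-adjusted total scaled by the allocation ratio, while max_unit caps what one instance may consume:

```python
# Hand-written sketch (not nova/placement code) using the inventory values
# logged for provider 2f529b36-df5f-4b37-8103-68f74f737726 above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 17,    'allocation_ratio': 1.0},
}


def usable_capacity(record: dict) -> float:
    """Reserved-adjusted capacity scaled by the overcommit (allocation) ratio."""
    return (record['total'] - record['reserved']) * record['allocation_ratio']


for rc, record in inventory.items():
    print(f"{rc}: {usable_capacity(record):g} schedulable "
          f"(max {record['max_unit']} per instance)")
# VCPU: 192 schedulable (max 16 per instance)
# MEMORY_MB: 196078 schedulable (max 65530 per instance)
# DISK_GB: 400 schedulable (max 17 per instance)
```

Under that reading, the 4.0 VCPU allocation ratio is what lets this 48-core node keep accepting the instance claims logged above (e.g. the DeleteServersAdminTestJSON claim on domain-c8) well past its physical core count, while MEMORY_MB and DISK_GB remain uncommitted at ratio 1.0.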
[ 777.409249] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c6a2f6-f6f7-487a-a05b-d6abed2e8fd9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.432230] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.440696] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 777.442123] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.442515] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3d557ed-d621-4763-b42b-2957f3a40ce2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.464357] env[69927]: DEBUG oslo_vmware.api [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095693, 'name': PowerOnVM_Task, 'duration_secs': 0.504308} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.465248] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.465534] env[69927]: DEBUG nova.compute.manager [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 777.466265] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.466265] env[69927]: value = "task-4095694" [ 777.466265] env[69927]: _type = "Task" [ 777.466265] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.467379] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a854b586-9d12-49f8-96f9-02c25987ae52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.484848] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095694, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.762188] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 777.763067] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dd31bc77-e8a6-4438-a747-3cfc0d7b2252 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.774217] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 777.774217] env[69927]: value = "task-4095695" [ 777.774217] env[69927]: _type = "Task" [ 777.774217] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.788276] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095695, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.904047] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.904744] env[69927]: DEBUG nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 777.910028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.253s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.910028] env[69927]: DEBUG nova.objects.instance [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lazy-loading 'resources' on Instance uuid b1bcbcfb-2320-434c-901f-0f6a476a3069 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.910724] env[69927]: DEBUG oslo_concurrency.lockutils [None req-530d59d6-463f-4ed2-a6a6-599f0640b13a tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "c6a06550-33ed-4fee-bd37-3fce9c55b235" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.105s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.996299] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095694, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.002583] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.285746] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095695, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.413185] env[69927]: DEBUG nova.compute.utils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.419044] env[69927]: DEBUG nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.421573] env[69927]: DEBUG nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 778.423142] env[69927]: DEBUG nova.network.neutron [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 778.481925] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095694, 'name': CreateVM_Task, 'duration_secs': 0.59572} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.484785] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.484946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.485436] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.485834] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 778.486030] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eae773d7-cced-4633-92e2-bb3195c3c948 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.491933] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 778.491933] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3344f-a5a8-9ee4-ac46-92bfe2b95510" [ 778.491933] env[69927]: _type = "Task" [ 778.491933] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.508663] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3344f-a5a8-9ee4-ac46-92bfe2b95510, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.536596] env[69927]: DEBUG nova.policy [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56f1cecf90004886bd7b3596ea39811e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0f8526f19884878a1ca5cb662729d7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 778.632733] env[69927]: INFO nova.compute.manager [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Rebuilding instance [ 778.700387] env[69927]: DEBUG nova.compute.manager [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 778.701478] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b67794-bbfd-4378-b7d9-224e5167a291 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.792648] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095695, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.887922] env[69927]: DEBUG nova.network.neutron [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Successfully created port: dc4904f6-1b89-4fc2-9ea9-18f666114c8a {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.919697] env[69927]: DEBUG nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 778.948315] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.006313] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3344f-a5a8-9ee4-ac46-92bfe2b95510, 'name': SearchDatastore_Task, 'duration_secs': 0.024057} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.010479] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.010859] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.011126] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.011294] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.011480] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 779.012053] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b9d74bf-4c98-4dc3-8c14-49229961c724 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.023707] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 779.023983] 
env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 779.025171] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75ef59c9-d7ab-42df-81c8-afa3705a9885 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.037531] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 779.037531] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525f1c90-ea6f-532f-a275-fc980754fe8f" [ 779.037531] env[69927]: _type = "Task" [ 779.037531] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.047442] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525f1c90-ea6f-532f-a275-fc980754fe8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.237028] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3b205f-a823-479f-8e13-bcba5c2893c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.246506] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049ad5fb-ab4c-41a9-b442-c1dd3202ff74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.286276] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9d83dda3-5fb1-416d-9307-faeef454efec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.286842] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9d83dda3-5fb1-416d-9307-faeef454efec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.286842] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9d83dda3-5fb1-416d-9307-faeef454efec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.287041] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9d83dda3-5fb1-416d-9307-faeef454efec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.287245] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9d83dda3-5fb1-416d-9307-faeef454efec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.289669] env[69927]: INFO nova.compute.manager [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Terminating instance [ 779.295243] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d405f36-dea4-4e9a-81ff-63202c1b562e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.306116] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095695, 'name': CloneVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.310230] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45067f9c-39e7-4607-bd0e-0ae30f6eaf20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.326780] env[69927]: DEBUG nova.compute.provider_tree [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.550439] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525f1c90-ea6f-532f-a275-fc980754fe8f, 'name': SearchDatastore_Task, 'duration_secs': 0.014831} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.551338] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad1ff6cc-65f8-41cb-a820-1091c38fbcb3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.557892] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 779.557892] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6b6eb-3f8d-5352-4ad3-b12cb5f0c647" [ 779.557892] env[69927]: _type = "Task" [ 779.557892] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.568510] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6b6eb-3f8d-5352-4ad3-b12cb5f0c647, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.722699] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.723028] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6991290f-863b-40f2-a7e7-abad68ff9932 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.730792] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 779.730792] env[69927]: value = "task-4095696" [ 779.730792] env[69927]: _type = "Task" [ 779.730792] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.746446] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095696, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.802442] env[69927]: DEBUG nova.compute.manager [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 779.802684] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.803015] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095695, 'name': CloneVM_Task, 'duration_secs': 1.894228} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.803822] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194d08ea-a7f1-4cd6-a7ee-a070d295cc02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.807141] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Created linked-clone VM from snapshot [ 779.811024] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865abe7a-ae6d-4519-bef8-892861e05e4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.822556] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Uploading image 68bb5ac2-beb9-4aa4-86bd-63fa31083482 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 779.827615] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.827615] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb5a0fa4-c5f2-481a-bf19-5d3678b8036c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.833449] env[69927]: DEBUG nova.scheduler.client.report [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 779.844914] env[69927]: DEBUG oslo_vmware.api [None 
req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 779.844914] env[69927]: value = "task-4095697" [ 779.844914] env[69927]: _type = "Task" [ 779.844914] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.860164] env[69927]: DEBUG oslo_vmware.api [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095697, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.862627] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 779.862627] env[69927]: value = "vm-811384" [ 779.862627] env[69927]: _type = "VirtualMachine" [ 779.862627] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 779.863442] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4a2d138e-db06-48ab-b973-91c05c6d5cf5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.873109] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lease: (returnval){ [ 779.873109] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52148b0b-fe7b-f9bd-3ea7-4076a796d813" [ 779.873109] env[69927]: _type = "HttpNfcLease" [ 779.873109] env[69927]: } obtained for exporting VM: (result){ [ 779.873109] env[69927]: value = "vm-811384" [ 779.873109] env[69927]: _type = "VirtualMachine" [ 779.873109] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 779.873109] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the lease: (returnval){ [ 779.873109] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52148b0b-fe7b-f9bd-3ea7-4076a796d813" [ 779.873109] env[69927]: _type = "HttpNfcLease" [ 779.873109] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 779.886300] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 779.886300] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52148b0b-fe7b-f9bd-3ea7-4076a796d813" [ 779.886300] env[69927]: _type = "HttpNfcLease" [ 779.886300] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 779.934850] env[69927]: DEBUG nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 779.969638] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 779.970941] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.970941] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 779.973417] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.973638] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 779.973835] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 779.974161] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 779.974378] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 779.974560] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 779.974754] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 779.974941] env[69927]: DEBUG nova.virt.hardware [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 779.977587] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec470525-9f91-4612-b8bf-91a31fdf9c10 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.993357] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e687f1-3177-4d8a-9f25-5b1d2e992e04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.069221] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6b6eb-3f8d-5352-4ad3-b12cb5f0c647, 'name': SearchDatastore_Task, 'duration_secs': 0.036366} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.069530] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.069810] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 780.070111] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bf5b0e1-3815-49aa-9247-20b2b48f3411 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.080023] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 780.080023] env[69927]: value = "task-4095699" [ 780.080023] env[69927]: _type = "Task" [ 780.080023] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.092091] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.244187] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095696, 'name': PowerOffVM_Task, 'duration_secs': 0.244267} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.244748] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 780.245173] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.246916] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f51929-8dba-4aa5-b690-48659e3fc198 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.257625] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 780.257625] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ecb2cb2-9f88-4d2c-bd7f-34c2d102d8d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.289032] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.289032] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.289032] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Deleting the datastore file [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.289032] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37090d87-e9c3-4fa9-b0d4-01ec5ae75891 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.301381] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 780.301381] env[69927]: value = "task-4095701" [ 780.301381] env[69927]: _type = "Task" [ 780.301381] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.312671] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "b67630a4-2c1a-440b-af82-80c908ffa6e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.314366] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.314759] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095701, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.338708] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.430s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.346226] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.822s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.346226] env[69927]: DEBUG nova.objects.instance [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lazy-loading 'resources' on Instance uuid ab8a8acc-cab7-4a82-bd90-b34147f17b0e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.357899] env[69927]: DEBUG oslo_vmware.api [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095697, 'name': PowerOffVM_Task, 'duration_secs': 0.386167} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.359014] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 780.359391] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 780.359678] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2472cc04-cad8-4dd8-8704-de19e3dcd6f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.380857] env[69927]: INFO nova.scheduler.client.report [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleted allocations for instance b1bcbcfb-2320-434c-901f-0f6a476a3069 [ 780.389713] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 780.389713] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52148b0b-fe7b-f9bd-3ea7-4076a796d813" [ 780.389713] env[69927]: _type = "HttpNfcLease" [ 780.389713] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 780.390347] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 780.390347] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52148b0b-fe7b-f9bd-3ea7-4076a796d813" [ 780.390347] env[69927]: _type = "HttpNfcLease" [ 780.390347] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 780.391253] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72edd81-981c-4e2f-ad57-8ad33fb29f84 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.401864] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f9f93b-9b21-86f3-a7d9-c587116ee257/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 780.402215] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f9f93b-9b21-86f3-a7d9-c587116ee257/disk-0.vmdk for reading. 
{{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 780.466871] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.467047] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.468017] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleting the datastore file [datastore2] 9d83dda3-5fb1-416d-9307-faeef454efec {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.468737] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88350d43-0848-4639-9295-ac9bd3804b80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.482266] env[69927]: DEBUG oslo_vmware.api [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 780.482266] env[69927]: value = "task-4095703" [ 780.482266] env[69927]: _type = "Task" [ 780.482266] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.496537] env[69927]: DEBUG oslo_vmware.api [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095703, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.541832] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a90c8856-48ce-42b5-ac63-4c060bceea21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.598317] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095699, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.813367] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095701, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194698} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.816364] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 780.816612] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 780.816772] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.880404] env[69927]: DEBUG nova.network.neutron [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Successfully updated port: dc4904f6-1b89-4fc2-9ea9-18f666114c8a {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 780.901348] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0c5b7f4e-d3e2-402e-a1a2-b95233343c29 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "b1bcbcfb-2320-434c-901f-0f6a476a3069" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.830s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.960941] env[69927]: DEBUG nova.compute.manager [req-1985c6ad-0272-46cf-a0a6-9ae1452dacde req-fd35f732-f306-4487-83db-4cd768b57495 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Received event network-vif-plugged-dc4904f6-1b89-4fc2-9ea9-18f666114c8a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 780.961405] env[69927]: DEBUG oslo_concurrency.lockutils [req-1985c6ad-0272-46cf-a0a6-9ae1452dacde req-fd35f732-f306-4487-83db-4cd768b57495 service nova] Acquiring lock "c2b6b943-f6d6-427f-aba5-1d619d889325-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.961738] env[69927]: DEBUG oslo_concurrency.lockutils [req-1985c6ad-0272-46cf-a0a6-9ae1452dacde req-fd35f732-f306-4487-83db-4cd768b57495 service nova] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.962029] env[69927]: DEBUG oslo_concurrency.lockutils [req-1985c6ad-0272-46cf-a0a6-9ae1452dacde req-fd35f732-f306-4487-83db-4cd768b57495 service nova] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.962344] env[69927]: DEBUG nova.compute.manager [req-1985c6ad-0272-46cf-a0a6-9ae1452dacde req-fd35f732-f306-4487-83db-4cd768b57495 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] No waiting events found dispatching network-vif-plugged-dc4904f6-1b89-4fc2-9ea9-18f666114c8a {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 780.962694] env[69927]: WARNING nova.compute.manager [req-1985c6ad-0272-46cf-a0a6-9ae1452dacde req-fd35f732-f306-4487-83db-4cd768b57495 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Received unexpected event network-vif-plugged-dc4904f6-1b89-4fc2-9ea9-18f666114c8a for instance with vm_state building and task_state spawning. [ 780.997302] env[69927]: DEBUG oslo_vmware.api [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299593} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.003720] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.003720] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 781.003720] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 781.003720] env[69927]: INFO nova.compute.manager [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Took 1.20 seconds to destroy the instance on the hypervisor. [ 781.004195] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.005514] env[69927]: DEBUG nova.compute.manager [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 781.005514] env[69927]: DEBUG nova.network.neutron [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.093477] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095699, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638196} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.093988] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 781.096097] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 781.097449] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e8f18bb-e6ba-4fd3-b766-b493a15c5f60 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.112134] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 781.112134] env[69927]: value = "task-4095704" [ 781.112134] env[69927]: _type = "Task" [ 781.112134] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.133972] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095704, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.383982] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "refresh_cache-c2b6b943-f6d6-427f-aba5-1d619d889325" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.383982] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired lock "refresh_cache-c2b6b943-f6d6-427f-aba5-1d619d889325" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.383982] env[69927]: DEBUG nova.network.neutron [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 781.607498] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9e3825-0caf-4190-a2ed-41ed9b81320e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.624664] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a522e1-7c7a-444f-9767-5eca6ec7bd03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.632567] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095704, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075082} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.633710] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.634730] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b4613f-bb1d-4895-9860-98a5807b0f53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.675579] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09efefb1-23f5-41e7-9e64-652defb934a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.688115] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.688884] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf09b03b-185a-461e-b600-2ce89bce2ebf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.714585] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc38a80-908d-4991-af21-c42d27d1f7c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.719829] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 781.719829] env[69927]: value = "task-4095705" [ 781.719829] env[69927]: _type = "Task" [ 781.719829] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.729517] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.729811] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.739628] env[69927]: DEBUG nova.compute.provider_tree [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.747274] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095705, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.856223] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 781.856223] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 781.856223] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 781.856571] env[69927]: DEBUG nova.virt.hardware [None 
req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 781.856736] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 781.857316] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 781.857683] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 781.857912] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 781.858109] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 781.858277] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 781.858593] env[69927]: DEBUG nova.virt.hardware [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 781.859856] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399b36b2-f8da-412c-b242-a6821f35567c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.869895] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908c62af-3563-4178-8904-50f3ac2fe651 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.888719] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.895103] 
env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.895240] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 781.895941] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee21026b-8d9a-4c25-9639-0adbe37b2530 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.916238] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.916238] env[69927]: value = "task-4095706" [ 781.916238] env[69927]: _type = "Task" [ 781.916238] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.927757] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095706, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.955295] env[69927]: DEBUG nova.network.neutron [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.996220] env[69927]: DEBUG nova.network.neutron [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.166017] env[69927]: DEBUG nova.network.neutron [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Updating instance_info_cache with network_info: [{"id": "dc4904f6-1b89-4fc2-9ea9-18f666114c8a", "address": "fa:16:3e:0a:26:82", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc4904f6-1b", "ovs_interfaceid": "dc4904f6-1b89-4fc2-9ea9-18f666114c8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 782.236447] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095705, 'name': ReconfigVM_Task, 'duration_secs': 0.330142} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.236856] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Reconfigured VM instance instance-0000001e to attach disk [datastore1] bf4bee47-36ce-43ee-96f1-96f262882986/bf4bee47-36ce-43ee-96f1-96f262882986.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.238041] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-726c6216-6439-4288-9870-077782222769 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.246268] env[69927]: DEBUG nova.scheduler.client.report [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.251997] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 782.251997] env[69927]: value = "task-4095707" [ 782.251997] env[69927]: _type = "Task" [ 782.251997] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.268275] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095707, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.429990] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095706, 'name': CreateVM_Task, 'duration_secs': 0.43623} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.432409] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 782.432944] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.435179] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.435587] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 782.435873] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d47a9025-b5cc-49a4-877c-fe7f080a2485 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.443860] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 782.443860] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d128c8-2b19-59dd-ccb1-1964e5c73647" [ 782.443860] env[69927]: _type = "Task" [ 782.443860] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.454718] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d128c8-2b19-59dd-ccb1-1964e5c73647, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.503684] env[69927]: INFO nova.compute.manager [-] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Took 1.50 seconds to deallocate network for instance. 
[ 782.669671] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Releasing lock "refresh_cache-c2b6b943-f6d6-427f-aba5-1d619d889325" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.670052] env[69927]: DEBUG nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Instance network_info: |[{"id": "dc4904f6-1b89-4fc2-9ea9-18f666114c8a", "address": "fa:16:3e:0a:26:82", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc4904f6-1b", "ovs_interfaceid": "dc4904f6-1b89-4fc2-9ea9-18f666114c8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 782.670503] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:26:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc4904f6-1b89-4fc2-9ea9-18f666114c8a', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 782.681065] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 782.681385] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 782.681555] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4e479cf-b976-48af-a656-5ec27e7e240a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.710833] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 782.710833] env[69927]: value = "task-4095708" [ 782.710833] env[69927]: _type = "Task" [ 782.710833] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.721571] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095708, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.758053] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.414s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.760772] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.972s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.765019] env[69927]: DEBUG nova.objects.instance [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lazy-loading 'resources' on Instance uuid e8e80be6-a82f-4cc5-92fd-366badf519b8 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.777521] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095707, 'name': Rename_Task, 'duration_secs': 0.144295} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.778402] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.779186] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-748f4b25-9b56-4544-a02a-326ffc3727b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.789279] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 782.789279] env[69927]: value = "task-4095709" [ 782.789279] env[69927]: _type = "Task" [ 782.789279] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.810322] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095709, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.811500] env[69927]: INFO nova.scheduler.client.report [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Deleted allocations for instance ab8a8acc-cab7-4a82-bd90-b34147f17b0e [ 782.959249] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d128c8-2b19-59dd-ccb1-1964e5c73647, 'name': SearchDatastore_Task, 'duration_secs': 0.033653} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.959762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.959939] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 782.960097] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.960258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.960567] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.960742] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-122783c2-1ae1-4805-80a9-685a3545161b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.973269] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.973480] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 782.974358] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f891917c-9411-43ae-acaf-5597a68d5702 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.982961] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 782.982961] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529bf4df-16b2-e835-6a60-baf9a7ef3e39" [ 782.982961] env[69927]: _type = "Task" [ 782.982961] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.996178] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529bf4df-16b2-e835-6a60-baf9a7ef3e39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.013557] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.230249] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095708, 'name': CreateVM_Task, 'duration_secs': 0.477147} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.230249] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 783.230249] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.230249] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.230249] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 783.230476] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c430e24-c803-4f3d-9acf-063a3a244ff1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.237399] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 783.237399] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f287fb-0b91-d1d9-6939-e03e8c1de69f" [ 783.237399] env[69927]: _type = "Task" [ 783.237399] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.251224] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f287fb-0b91-d1d9-6939-e03e8c1de69f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.303968] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095709, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.322886] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c05e90d-2745-446b-85e0-65401f3e90c2 tempest-ListServersNegativeTestJSON-2142759027 tempest-ListServersNegativeTestJSON-2142759027-project-member] Lock "ab8a8acc-cab7-4a82-bd90-b34147f17b0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.585s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.484554] env[69927]: DEBUG nova.compute.manager [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Received event network-changed-dc4904f6-1b89-4fc2-9ea9-18f666114c8a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.484804] env[69927]: DEBUG nova.compute.manager [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Refreshing instance network info cache due to event network-changed-dc4904f6-1b89-4fc2-9ea9-18f666114c8a. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.485073] env[69927]: DEBUG oslo_concurrency.lockutils [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] Acquiring lock "refresh_cache-c2b6b943-f6d6-427f-aba5-1d619d889325" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.485236] env[69927]: DEBUG oslo_concurrency.lockutils [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] Acquired lock "refresh_cache-c2b6b943-f6d6-427f-aba5-1d619d889325" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.485440] env[69927]: DEBUG nova.network.neutron [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Refreshing network info cache for port dc4904f6-1b89-4fc2-9ea9-18f666114c8a {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.507353] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529bf4df-16b2-e835-6a60-baf9a7ef3e39, 'name': SearchDatastore_Task, 'duration_secs': 0.017162} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.508573] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-550f4211-3edc-4960-a0ca-052962257086 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.525188] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 783.525188] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521e5630-6c29-cae6-fb66-163eeb3a1876" [ 783.525188] env[69927]: _type = "Task" [ 783.525188] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.534989] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521e5630-6c29-cae6-fb66-163eeb3a1876, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.749224] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f287fb-0b91-d1d9-6939-e03e8c1de69f, 'name': SearchDatastore_Task, 'duration_secs': 0.015583} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.749531] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.749767] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.750022] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.750200] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.751108] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.751108] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46b7495d-9347-46e9-b0a1-e1dbf07a0a15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.766185] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 
tempest-DeleteServersAdminTestJSON-1107524716-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.769431] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 783.771667] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41438faf-047e-4e54-b369-f33afaea5996 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.781660] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 783.781660] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3c24e-b150-a574-1a0f-09fc2b237a22" [ 783.781660] env[69927]: _type = "Task" [ 783.781660] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.797542] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3c24e-b150-a574-1a0f-09fc2b237a22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.807026] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095709, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.961295] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a1a4be-2be1-4d20-b574-ad9552f8f51c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.971280] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82275614-1c25-47a6-a086-6d0f1ab39c45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.008743] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a350f2-bbb7-4e3a-be17-a73b7b758453 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.017822] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbb4a22-1a73-4d7a-9915-29303816c37e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.038951] env[69927]: DEBUG nova.compute.provider_tree [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.047355] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521e5630-6c29-cae6-fb66-163eeb3a1876, 'name': SearchDatastore_Task, 'duration_secs': 0.023849} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.050031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.050326] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 784.051252] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b1b8fec-1869-477d-a047-54dc213e38af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.059818] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 784.059818] env[69927]: value = "task-4095710" [ 784.059818] env[69927]: _type = "Task" [ 784.059818] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.070435] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.150425] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "480a672c-cb48-45e3-86bd-1741957a5124" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.150854] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "480a672c-cb48-45e3-86bd-1741957a5124" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.293636] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3c24e-b150-a574-1a0f-09fc2b237a22, 'name': SearchDatastore_Task, 'duration_secs': 0.018345} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.298289] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-111bd8cb-2853-4f7f-98d1-b026f470f462 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.308262] env[69927]: DEBUG oslo_vmware.api [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095709, 'name': PowerOnVM_Task, 'duration_secs': 1.519212} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.309779] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 784.310027] env[69927]: DEBUG nova.compute.manager [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 784.310395] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 784.310395] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5289535f-73a9-47e7-28bb-de90dbbacc03" [ 784.310395] env[69927]: _type = "Task" [ 784.310395] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.311334] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ba3734-4dca-4989-8beb-08bfacbc20de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.330421] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5289535f-73a9-47e7-28bb-de90dbbacc03, 'name': SearchDatastore_Task, 'duration_secs': 0.013286} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.330738] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.330998] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c2b6b943-f6d6-427f-aba5-1d619d889325/c2b6b943-f6d6-427f-aba5-1d619d889325.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 784.331334] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ced99ee-ed19-49ba-8beb-6c8501a3bcee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.339075] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 784.339075] env[69927]: value = "task-4095711" [ 784.339075] env[69927]: _type = "Task" [ 784.339075] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.349014] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.383203] env[69927]: DEBUG nova.network.neutron [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Updated VIF entry in instance network info cache for port dc4904f6-1b89-4fc2-9ea9-18f666114c8a. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.383571] env[69927]: DEBUG nova.network.neutron [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Updating instance_info_cache with network_info: [{"id": "dc4904f6-1b89-4fc2-9ea9-18f666114c8a", "address": "fa:16:3e:0a:26:82", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc4904f6-1b", "ovs_interfaceid": "dc4904f6-1b89-4fc2-9ea9-18f666114c8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.542946] env[69927]: DEBUG nova.scheduler.client.report [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.572252] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095710, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.836995] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.858482] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095711, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.887756] env[69927]: DEBUG oslo_concurrency.lockutils [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] Releasing lock "refresh_cache-c2b6b943-f6d6-427f-aba5-1d619d889325" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.887756] env[69927]: DEBUG nova.compute.manager [req-f2f3c5f5-e376-4728-a569-4b43e2a0fa42 req-09952503-a595-4d5d-aa61-0ae87655ede3 service nova] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Received event network-vif-deleted-9fe1cc7b-35d7-4f3c-84fa-29a7b02aaf97 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.050883] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.290s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.054198] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.703s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.056508] env[69927]: INFO nova.compute.claims [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.075341] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095710, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637156} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.075341] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 785.075341] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 785.075742] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4def4c65-a772-4d7a-8bad-9e01a6d649b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.088512] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 785.088512] env[69927]: value = "task-4095712" [ 785.088512] env[69927]: _type = "Task" [ 785.088512] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.103656] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095712, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.104652] env[69927]: INFO nova.scheduler.client.report [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Deleted allocations for instance e8e80be6-a82f-4cc5-92fd-366badf519b8 [ 785.351244] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095711, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.800658} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.351586] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c2b6b943-f6d6-427f-aba5-1d619d889325/c2b6b943-f6d6-427f-aba5-1d619d889325.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 785.351829] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 785.352163] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68ac87c3-c6a3-427b-91ce-e8981b5435cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.359812] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 785.359812] env[69927]: value = "task-4095713" [ 785.359812] env[69927]: _type = "Task" [ 785.359812] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.371661] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095713, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.605820] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095712, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09056} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.608811] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 785.608928] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ff76a8-5e50-468d-9f04-74f18b2613b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.618941] env[69927]: DEBUG oslo_concurrency.lockutils [None req-553b9690-6e05-442a-8855-c1c9b1961032 tempest-ServersAaction247Test-1214994011 tempest-ServersAaction247Test-1214994011-project-member] Lock "e8e80be6-a82f-4cc5-92fd-366badf519b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.630s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.644665] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 785.645936] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f85bef49-8dbf-441d-b83d-80a4381f1d6b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.673028] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 785.673028] env[69927]: value = "task-4095714" [ 785.673028] env[69927]: _type = "Task" [ 785.673028] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.685175] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095714, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.874531] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071092} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.874531] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 785.875092] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a2162c-104c-407e-a093-b014672f9e92 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.907273] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] c2b6b943-f6d6-427f-aba5-1d619d889325/c2b6b943-f6d6-427f-aba5-1d619d889325.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 785.907761] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "bf4bee47-36ce-43ee-96f1-96f262882986" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.908197] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "bf4bee47-36ce-43ee-96f1-96f262882986" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.908313] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "bf4bee47-36ce-43ee-96f1-96f262882986-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.908510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "bf4bee47-36ce-43ee-96f1-96f262882986-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.908856] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "bf4bee47-36ce-43ee-96f1-96f262882986-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
785.910685] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa6bc369-f977-4c51-aadb-1e7d8f453e94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.927578] env[69927]: INFO nova.compute.manager [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Terminating instance [ 785.938107] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 785.938107] env[69927]: value = "task-4095715" [ 785.938107] env[69927]: _type = "Task" [ 785.938107] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.951839] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095715, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.191054] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095714, 'name': ReconfigVM_Task, 'duration_secs': 0.442869} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.191402] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Reconfigured VM instance instance-0000001f to attach disk [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.192399] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f240b7b-eb23-4c8b-a8d2-81e724b38ca7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.203888] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 786.203888] env[69927]: value = "task-4095716" [ 786.203888] env[69927]: _type = "Task" [ 786.203888] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.217191] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095716, 'name': Rename_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.433352] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "refresh_cache-bf4bee47-36ce-43ee-96f1-96f262882986" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.433742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "refresh_cache-bf4bee47-36ce-43ee-96f1-96f262882986" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.434546] env[69927]: DEBUG nova.network.neutron [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.456102] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095715, 'name': ReconfigVM_Task, 'duration_secs': 0.503182} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.459569] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Reconfigured VM instance instance-00000020 to attach disk [datastore1] c2b6b943-f6d6-427f-aba5-1d619d889325/c2b6b943-f6d6-427f-aba5-1d619d889325.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.460810] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-605e4189-1e24-4df5-aa45-64199e02b75e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.472537] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 786.472537] env[69927]: value = "task-4095717" [ 786.472537] env[69927]: _type = "Task" [ 786.472537] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.485827] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095717, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.722018] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095716, 'name': Rename_Task, 'duration_secs': 0.222252} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.722018] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 786.722018] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01426beb-b8ce-497b-99ea-2548a411990f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.730757] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 786.730757] env[69927]: value = "task-4095718" [ 786.730757] env[69927]: _type = "Task" [ 786.730757] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.738689] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd483c5-a081-4464-8e76-cb618cc5fbb1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.744886] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095718, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.750254] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7995ac24-534a-4c8e-aed4-73bf49ad39ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.785971] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624ff129-8795-460e-afc4-42ec4c044023 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.795253] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35da50a-e0e2-4161-8528-51fcb99753c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.812021] env[69927]: DEBUG nova.compute.provider_tree [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.964686] env[69927]: DEBUG nova.network.neutron [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.990549] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095717, 'name': Rename_Task, 'duration_secs': 0.256122} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.993583] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 786.993925] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf08f60d-3a48-4eaf-ae2e-9c2b7f20415b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.002842] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 787.002842] env[69927]: value = "task-4095719" [ 787.002842] env[69927]: _type = "Task" [ 787.002842] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.013135] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.063285] env[69927]: DEBUG nova.network.neutron [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.247471] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095718, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.314825] env[69927]: DEBUG nova.scheduler.client.report [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.513431] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095719, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.566244] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "refresh_cache-bf4bee47-36ce-43ee-96f1-96f262882986" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.566703] env[69927]: DEBUG nova.compute.manager [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 787.566900] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 787.567800] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5068b829-8609-40a2-9162-789a51bd7361 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.575771] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 787.576039] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75377a19-46f3-40f6-bb0f-4be7f43ec0ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.582928] env[69927]: DEBUG oslo_vmware.api [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 787.582928] env[69927]: value = "task-4095720" [ 787.582928] env[69927]: _type = "Task" [ 787.582928] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.592655] env[69927]: DEBUG oslo_vmware.api [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.750279] env[69927]: DEBUG oslo_vmware.api [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095718, 'name': PowerOnVM_Task, 'duration_secs': 0.605157} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.750822] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 787.751102] env[69927]: DEBUG nova.compute.manager [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 787.752051] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e20385-51cb-4397-8b55-913894e333dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.825080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.769s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.825080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.431s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.826980] env[69927]: INFO nova.compute.claims [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.838601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.839170] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.017518] env[69927]: DEBUG oslo_vmware.api [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095719, 'name': PowerOnVM_Task, 'duration_secs': 0.872968} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.017903] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 788.018214] env[69927]: INFO nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Took 8.08 seconds to spawn the instance on the hypervisor. [ 788.018485] env[69927]: DEBUG nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 788.020117] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda8f007-28ea-495c-b7fd-9f7fb2b828fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.097427] env[69927]: DEBUG oslo_vmware.api [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095720, 'name': PowerOffVM_Task, 'duration_secs': 0.182331} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.097742] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 788.097924] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 788.098236] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92386177-db85-4167-adbe-dcae0add6608 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.133422] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 788.133748] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 788.133980] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleting the datastore file [datastore1] bf4bee47-36ce-43ee-96f1-96f262882986 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 788.134310] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3b41ab3-de36-485b-9815-f18d1ef43d04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.143795] env[69927]: DEBUG oslo_vmware.api [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 788.143795] env[69927]: value = "task-4095722" [ 788.143795] env[69927]: _type = "Task" [ 788.143795] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.155120] env[69927]: DEBUG oslo_vmware.api [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095722, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.276684] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.333760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "308efd94-6b1f-4669-84be-9e6b86331288" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.334086] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "308efd94-6b1f-4669-84be-9e6b86331288" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.543274] env[69927]: INFO nova.compute.manager [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Took 48.48 seconds to build instance. [ 788.655359] env[69927]: DEBUG oslo_vmware.api [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275105} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.655856] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.656192] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 788.656420] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.656772] env[69927]: INFO nova.compute.manager [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Took 1.09 seconds to destroy the instance on the hypervisor. 
[ 788.657232] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.657484] env[69927]: DEBUG nova.compute.manager [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 788.657614] env[69927]: DEBUG nova.network.neutron [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.674661] env[69927]: DEBUG nova.network.neutron [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.838901] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "308efd94-6b1f-4669-84be-9e6b86331288" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.840298] env[69927]: DEBUG nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 789.046335] env[69927]: DEBUG oslo_concurrency.lockutils [None req-51d5a2c3-d9a1-4079-8ee6-4eaa88f9e640 tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.681s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.181758] env[69927]: DEBUG nova.network.neutron [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.220116] env[69927]: INFO nova.compute.manager [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Rebuilding instance [ 789.287294] env[69927]: DEBUG nova.compute.manager [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 789.288599] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dde495e-aaf4-4ecf-819a-67a93ee25d8c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.348815] env[69927]: DEBUG nova.compute.utils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 789.350279] env[69927]: DEBUG nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 789.350442] env[69927]: DEBUG nova.network.neutron [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 789.404085] env[69927]: DEBUG nova.policy [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc49a9c80dba40bfb3b59d301a0009a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '887aef6dcca4410a9071d3e65361cc12', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 789.529747] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44d8701-b72c-4514-8b2b-183e40a67d87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.545627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bb9422-36fa-4e3e-b71f-30ae52d29694 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.551970] env[69927]: DEBUG nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 789.581831] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea4807b-433f-433d-82c8-bebfe0e039d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.589526] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb5d0be-ad6d-4a0c-93c3-23d4b6363dc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.609035] env[69927]: DEBUG nova.compute.provider_tree [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.690384] env[69927]: INFO nova.compute.manager [-] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Took 1.03 seconds to deallocate network for instance. 
[ 789.752506] env[69927]: DEBUG nova.network.neutron [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Successfully created port: 6ab9e78c-be4f-450a-99bc-80d392d1c61f {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.853357] env[69927]: DEBUG nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 790.105944] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.112576] env[69927]: DEBUG nova.scheduler.client.report [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.198958] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.304934] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.305295] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16d429f6-e454-4db0-bd7f-1d66eedc13c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.313880] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 790.313880] env[69927]: value = "task-4095723" [ 790.313880] env[69927]: _type = "Task" [ 790.313880] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.322845] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "c2b6b943-f6d6-427f-aba5-1d619d889325" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.323243] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.323544] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "c2b6b943-f6d6-427f-aba5-1d619d889325-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.323810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.325103] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.334052] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095723, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.334676] env[69927]: INFO nova.compute.manager [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Terminating instance [ 790.516662] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f9f93b-9b21-86f3-a7d9-c587116ee257/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 790.517530] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac3e2ed-066b-4947-8390-c4a36d65e100 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.526834] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f9f93b-9b21-86f3-a7d9-c587116ee257/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 790.527114] env[69927]: ERROR oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f9f93b-9b21-86f3-a7d9-c587116ee257/disk-0.vmdk due to incomplete transfer. [ 790.527348] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7066eb7f-eb3c-425c-944a-744876b80b34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.541038] env[69927]: DEBUG oslo_vmware.rw_handles [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f9f93b-9b21-86f3-a7d9-c587116ee257/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 790.541315] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Uploaded image 68bb5ac2-beb9-4aa4-86bd-63fa31083482 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 790.543767] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 790.544067] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-59df118a-f7b7-4b23-804a-afddae1d2be6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.555520] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 790.555520] env[69927]: value = "task-4095724" [ 790.555520] env[69927]: _type = "Task" [ 790.555520] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.567831] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095724, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.617989] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.793s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.618533] env[69927]: DEBUG nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 790.621736] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.558s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.621958] env[69927]: DEBUG nova.objects.instance [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lazy-loading 'resources' on Instance uuid cdf0ea6e-d884-49c1-87ec-cd6de1376c7f {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.828742] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095723, 'name': PowerOffVM_Task, 'duration_secs': 0.157306} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.829017] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 790.829257] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.830031] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8431cde0-da90-4c5e-a26e-9f652552f496 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.838195] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 790.838463] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c7cb79a-d56f-4127-8ed5-208865b61b63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.840432] env[69927]: DEBUG nova.compute.manager [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 790.840627] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.841403] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e609fed-7dc7-49fa-b8f6-5f30650426ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.852202] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.852480] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-478022cc-66e8-4eb9-86ca-ee484f18a9d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.860502] env[69927]: DEBUG oslo_vmware.api [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 790.860502] env[69927]: value = "task-4095726" [ 790.860502] env[69927]: _type = "Task" [ 790.860502] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.872032] env[69927]: DEBUG nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 790.874274] env[69927]: DEBUG oslo_vmware.api [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095726, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.875877] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 790.876078] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 790.876261] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Deleting the datastore file [datastore2] c6a06550-33ed-4fee-bd37-3fce9c55b235 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 790.876512] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f6d5346-c026-4161-83f3-f25dd3d33073 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.884339] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 790.884339] env[69927]: value = "task-4095727" [ 790.884339] env[69927]: _type = "Task" [ 790.884339] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.895041] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095727, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.907963] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 790.908257] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.908416] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 790.908596] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.908748] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 790.908891] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 790.909117] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 790.909280] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 790.909447] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 
tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 790.909611] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 790.909786] env[69927]: DEBUG nova.virt.hardware [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 790.910693] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12300909-2ef6-4e49-89a8-1c9809dfb622 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.920513] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb74d48d-7747-496b-ad4a-6bdd9a2b4188 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.065881] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095724, 'name': Destroy_Task, 'duration_secs': 0.441749} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.067484] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Destroyed the VM [ 791.067756] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 791.070507] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-82cdbf18-8ef2-412b-995c-a05a71b18a8d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.072951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.073220] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.079935] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 791.079935] env[69927]: value = "task-4095728" [ 791.079935] env[69927]: _type = "Task" [ 791.079935] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.090509] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095728, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.127742] env[69927]: DEBUG nova.compute.utils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 791.129301] env[69927]: DEBUG nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 791.129472] env[69927]: DEBUG nova.network.neutron [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.191695] env[69927]: DEBUG nova.policy [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd98497c8260f4692b8d5410447575350', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babf071cfc564f7d83c28d449c774840', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 791.260069] env[69927]: DEBUG nova.compute.manager [req-a18427c5-37da-496d-81d0-bd378c79e8cd req-f9ab6503-951d-4cf2-a591-6592b40867dc service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Received event network-vif-plugged-6ab9e78c-be4f-450a-99bc-80d392d1c61f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 791.260415] env[69927]: DEBUG oslo_concurrency.lockutils [req-a18427c5-37da-496d-81d0-bd378c79e8cd req-f9ab6503-951d-4cf2-a591-6592b40867dc service nova] Acquiring lock "cde9885b-1aa8-411d-847e-087fe375002b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.260501] env[69927]: DEBUG oslo_concurrency.lockutils [req-a18427c5-37da-496d-81d0-bd378c79e8cd req-f9ab6503-951d-4cf2-a591-6592b40867dc service nova] Lock "cde9885b-1aa8-411d-847e-087fe375002b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.260663] env[69927]: DEBUG oslo_concurrency.lockutils [req-a18427c5-37da-496d-81d0-bd378c79e8cd req-f9ab6503-951d-4cf2-a591-6592b40867dc service nova] Lock "cde9885b-1aa8-411d-847e-087fe375002b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.260945] env[69927]: DEBUG nova.compute.manager [req-a18427c5-37da-496d-81d0-bd378c79e8cd req-f9ab6503-951d-4cf2-a591-6592b40867dc service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] No waiting events found dispatching network-vif-plugged-6ab9e78c-be4f-450a-99bc-80d392d1c61f {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 791.261450] env[69927]: WARNING nova.compute.manager [req-a18427c5-37da-496d-81d0-bd378c79e8cd req-f9ab6503-951d-4cf2-a591-6592b40867dc service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Received unexpected event network-vif-plugged-6ab9e78c-be4f-450a-99bc-80d392d1c61f for instance with vm_state building and task_state spawning. [ 791.372244] env[69927]: DEBUG oslo_vmware.api [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095726, 'name': PowerOffVM_Task, 'duration_secs': 0.226099} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.372602] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 791.372774] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 791.373042] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d259662f-22ce-440a-aeca-e2a6713185ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.395902] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095727, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125364} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.396165] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 791.396350] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 791.396529] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 791.445746] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 791.446063] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 791.446249] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Deleting the datastore file [datastore1] c2b6b943-f6d6-427f-aba5-1d619d889325 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.450132] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ec01cfe-42cd-4951-9254-cd50727c29cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.450616] env[69927]: DEBUG nova.network.neutron [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Successfully updated port: 6ab9e78c-be4f-450a-99bc-80d392d1c61f {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 791.455841] env[69927]: DEBUG oslo_vmware.api [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for the task: (returnval){ [ 791.455841] env[69927]: value = "task-4095730" [ 791.455841] env[69927]: _type = "Task" [ 791.455841] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.466954] env[69927]: DEBUG oslo_vmware.api [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095730, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.601065] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095728, 'name': RemoveSnapshot_Task} progress is 31%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.630592] env[69927]: DEBUG nova.network.neutron [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Successfully created port: 8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.634250] env[69927]: DEBUG nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 791.741812] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f74bf24-7bd1-4f76-a8a8-8feadc87f9f2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.750180] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8598ff5a-5d26-4c36-912f-2b9b276079a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.783194] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74efab68-c9ea-4c6a-9787-309453b3561e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.791439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d14b24b-6ade-41c9-8f6d-0ef759d67d8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.805706] env[69927]: DEBUG nova.compute.provider_tree [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.952881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "refresh_cache-cde9885b-1aa8-411d-847e-087fe375002b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.953374] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquired lock "refresh_cache-cde9885b-1aa8-411d-847e-087fe375002b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.953374] env[69927]: DEBUG nova.network.neutron [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.969947] env[69927]: DEBUG oslo_vmware.api [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Task: {'id': task-4095730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202789} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.970261] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 791.970448] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 791.970628] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 791.970798] env[69927]: INFO nova.compute.manager [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Took 1.13 seconds to destroy the instance on the hypervisor. [ 791.971052] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 791.971240] env[69927]: DEBUG nova.compute.manager [-] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 791.971338] env[69927]: DEBUG nova.network.neutron [-] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.092964] env[69927]: DEBUG oslo_vmware.api [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095728, 'name': RemoveSnapshot_Task, 'duration_secs': 0.568137} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.093320] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 792.093537] env[69927]: INFO nova.compute.manager [None req-87d66e3e-47f8-4676-8e2a-83ab4d579fe8 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Took 16.43 seconds to snapshot the instance on the hypervisor. [ 792.308414] env[69927]: DEBUG nova.scheduler.client.report [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.430937] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.431216] env[69927]: DEBUG nova.virt.hardware [None 
req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.431369] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.431547] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.431691] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.431840] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.432054] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.432217] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.432406] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.432572] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.432742] env[69927]: DEBUG nova.virt.hardware [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.433642] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e5e401-b7c9-49ec-9954-14a28c03ca2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.442429] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853f4cff-7c8e-4d79-b77d-6d2d26169058 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.456199] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.461760] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.463714] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.463963] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d40b8fce-aa43-488e-aa11-53621f4c4e18 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.484344] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.484344] env[69927]: value = "task-4095731" [ 792.484344] env[69927]: _type = "Task" [ 792.484344] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.493703] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095731, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.514372] env[69927]: DEBUG nova.network.neutron [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.649602] env[69927]: DEBUG nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 792.664984] env[69927]: DEBUG nova.network.neutron [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Updating instance_info_cache with network_info: [{"id": "6ab9e78c-be4f-450a-99bc-80d392d1c61f", "address": "fa:16:3e:f3:2a:56", "network": {"id": "9356179a-05b1-41e4-9506-f25e62df1c44", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1532713479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887aef6dcca4410a9071d3e65361cc12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab9e78c-be", "ovs_interfaceid": "6ab9e78c-be4f-450a-99bc-80d392d1c61f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.676441] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.676753] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.676855] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.677359] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 
tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.677536] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.677694] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.677942] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.678199] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.678493] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.678715] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.678954] env[69927]: DEBUG nova.virt.hardware [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.679962] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57495b3-0d5d-49ac-a280-7d66224fe4eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.689955] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22520be3-79e2-40e7-a2f1-9ffcd098a766 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.755125] env[69927]: DEBUG nova.network.neutron [-] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.814862] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.817475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.566s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.819363] env[69927]: INFO nova.compute.claims [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.840808] env[69927]: INFO nova.scheduler.client.report [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Deleted allocations for instance cdf0ea6e-d884-49c1-87ec-cd6de1376c7f [ 792.998959] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095731, 'name': CreateVM_Task, 'duration_secs': 0.268724} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.999267] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 792.999675] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.999812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.000156] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 793.000433] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64dfaf5-3b4f-49f0-bff3-58071ade4529 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.006134] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] 
Waiting for the task: (returnval){ [ 793.006134] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b75940-9a45-891b-74af-97479d091e43" [ 793.006134] env[69927]: _type = "Task" [ 793.006134] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.015103] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b75940-9a45-891b-74af-97479d091e43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.170353] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Releasing lock "refresh_cache-cde9885b-1aa8-411d-847e-087fe375002b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.170784] env[69927]: DEBUG nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Instance network_info: |[{"id": "6ab9e78c-be4f-450a-99bc-80d392d1c61f", "address": "fa:16:3e:f3:2a:56", "network": {"id": "9356179a-05b1-41e4-9506-f25e62df1c44", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1532713479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887aef6dcca4410a9071d3e65361cc12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab9e78c-be", "ovs_interfaceid": "6ab9e78c-be4f-450a-99bc-80d392d1c61f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 793.171322] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:2a:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ab9e78c-be4f-450a-99bc-80d392d1c61f', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 793.179478] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Creating folder: Project 
(887aef6dcca4410a9071d3e65361cc12). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.179737] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09eb4fae-c7fd-4706-b0bd-945b2e980f38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.192659] env[69927]: DEBUG nova.network.neutron [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Successfully updated port: 8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 793.197078] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Created folder: Project (887aef6dcca4410a9071d3e65361cc12) in parent group-v811283. [ 793.197078] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Creating folder: Instances. Parent ref: group-v811388. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.197078] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7de7e0d-ac32-4445-a659-b29495e8506f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.209772] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Created folder: Instances in parent group-v811388. [ 793.210648] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 793.210878] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 793.211389] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3776ebe-ca2e-4676-b05c-c78958acee48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.232654] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 793.232654] env[69927]: value = "task-4095734" [ 793.232654] env[69927]: _type = "Task" [ 793.232654] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.241462] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095734, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.258030] env[69927]: INFO nova.compute.manager [-] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Took 1.29 seconds to deallocate network for instance. 
[ 793.349382] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cade5f02-75db-46f8-b800-9a9fce362c4b tempest-ServerAddressesTestJSON-1437605563 tempest-ServerAddressesTestJSON-1437605563-project-member] Lock "cdf0ea6e-d884-49c1-87ec-cd6de1376c7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.762s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.397982] env[69927]: DEBUG nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Received event network-changed-6ab9e78c-be4f-450a-99bc-80d392d1c61f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 793.398217] env[69927]: DEBUG nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Refreshing instance network info cache due to event network-changed-6ab9e78c-be4f-450a-99bc-80d392d1c61f. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 793.398269] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Acquiring lock "refresh_cache-cde9885b-1aa8-411d-847e-087fe375002b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.398401] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Acquired lock "refresh_cache-cde9885b-1aa8-411d-847e-087fe375002b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.398670] env[69927]: DEBUG nova.network.neutron [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Refreshing network info cache for port 6ab9e78c-be4f-450a-99bc-80d392d1c61f {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 793.519033] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b75940-9a45-891b-74af-97479d091e43, 'name': SearchDatastore_Task, 'duration_secs': 0.01178} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.519211] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.519396] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 793.519637] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.519784] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.519962] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 793.520255] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1854809-d5b6-4019-b159-4e25d9545b5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.529814] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 793.530088] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 793.531177] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14a7d9d7-065a-4779-a9b7-cabad346e3ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.539040] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 793.539040] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5205e2ae-bfd1-7d48-7d95-e45acf34711b" [ 793.539040] env[69927]: _type = "Task" [ 793.539040] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.549580] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5205e2ae-bfd1-7d48-7d95-e45acf34711b, 'name': SearchDatastore_Task, 'duration_secs': 0.009343} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.550297] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c300346-6033-44d0-929f-a3b1508d6890 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.555955] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 793.555955] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523ca486-2f23-b65b-72c7-796dc1f40912" [ 793.555955] env[69927]: _type = "Task" [ 793.555955] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.564075] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523ca486-2f23-b65b-72c7-796dc1f40912, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.697585] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "refresh_cache-353ceb53-07e6-4e9b-bed5-ce9fca368b27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.697745] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "refresh_cache-353ceb53-07e6-4e9b-bed5-ce9fca368b27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.698091] env[69927]: DEBUG nova.network.neutron [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.744095] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095734, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.765384] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.043484] env[69927]: DEBUG nova.compute.manager [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 794.044782] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8268f80d-7945-4e77-bcce-33d20110a248 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.080035] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523ca486-2f23-b65b-72c7-796dc1f40912, 'name': SearchDatastore_Task, 'duration_secs': 0.010126} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.080035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.080035] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 794.080035] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-105e2662-810e-492c-b138-59743c9a9fc9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.088850] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 794.088850] env[69927]: value = "task-4095735" [ 794.088850] env[69927]: _type = "Task" [ 794.088850] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.098740] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095735, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.248351] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095734, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.269771] env[69927]: DEBUG nova.network.neutron [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.473380] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a1b9f5-eb4f-4890-80f2-b2df28f7db17 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.481955] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d1b708-09bb-41bf-a18c-6895fbce8e83 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.514927] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b3bd8e-d1e0-4e96-a85e-7ff00780764c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.527492] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5784470d-7d99-4596-8d13-2bca1d12ef91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.545561] env[69927]: DEBUG nova.compute.provider_tree [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.561975] env[69927]: INFO nova.compute.manager [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] instance snapshotting [ 794.567638] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9356d097-49b0-4f4c-b0cd-85a1a4cdcc81 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.599023] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66492312-3d9e-473d-95bd-d8c6569ab4d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.608119] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095735, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518051} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.610346] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 794.610677] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 794.613654] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebcd9f5a-8e8f-43db-846c-733ffc387a3b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.616575] env[69927]: DEBUG nova.network.neutron [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Updated VIF entry in instance network info cache for port 6ab9e78c-be4f-450a-99bc-80d392d1c61f. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 794.616927] env[69927]: DEBUG nova.network.neutron [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Updating instance_info_cache with network_info: [{"id": "6ab9e78c-be4f-450a-99bc-80d392d1c61f", "address": "fa:16:3e:f3:2a:56", "network": {"id": "9356179a-05b1-41e4-9506-f25e62df1c44", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1532713479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887aef6dcca4410a9071d3e65361cc12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ab9e78c-be", "ovs_interfaceid": "6ab9e78c-be4f-450a-99bc-80d392d1c61f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.618893] env[69927]: DEBUG nova.network.neutron [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Updating instance_info_cache with network_info: [{"id": "8d92ef1c-0941-4eaa-a28f-e5cf6d76a571", "address": 
"fa:16:3e:df:a5:12", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d92ef1c-09", "ovs_interfaceid": "8d92ef1c-0941-4eaa-a28f-e5cf6d76a571", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.625116] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 794.625116] env[69927]: value = "task-4095736" [ 794.625116] env[69927]: _type = "Task" [ 794.625116] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.634241] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095736, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.746894] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095734, 'name': CreateVM_Task, 'duration_secs': 1.098647} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.747166] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 794.747938] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.748193] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.748568] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 794.748884] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d6b811c-e6a4-4cbf-b7d3-8c3a9b41f96f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.754664] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 794.754664] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f612b-8f4e-6c29-58d9-bd26eb24cfdf" [ 794.754664] env[69927]: _type = "Task" [ 794.754664] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.766468] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f612b-8f4e-6c29-58d9-bd26eb24cfdf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.051609] env[69927]: DEBUG nova.scheduler.client.report [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.119902] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 795.120131] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e6274982-8c8b-4031-af2e-bdeb978fdae9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.122967] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Releasing lock "refresh_cache-cde9885b-1aa8-411d-847e-087fe375002b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.123228] env[69927]: DEBUG nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Received event network-vif-deleted-dc4904f6-1b89-4fc2-9ea9-18f666114c8a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.123461] env[69927]: DEBUG nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Received event network-vif-plugged-8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.123683] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Acquiring lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.123889] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.124060] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Lock 
"353ceb53-07e6-4e9b-bed5-ce9fca368b27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.124228] env[69927]: DEBUG nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] No waiting events found dispatching network-vif-plugged-8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 795.124460] env[69927]: WARNING nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Received unexpected event network-vif-plugged-8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 for instance with vm_state building and task_state spawning. [ 795.124661] env[69927]: DEBUG nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Received event network-changed-8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.124772] env[69927]: DEBUG nova.compute.manager [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Refreshing instance network info cache due to event network-changed-8d92ef1c-0941-4eaa-a28f-e5cf6d76a571. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 795.124938] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Acquiring lock "refresh_cache-353ceb53-07e6-4e9b-bed5-ce9fca368b27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.125274] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "refresh_cache-353ceb53-07e6-4e9b-bed5-ce9fca368b27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.125550] env[69927]: DEBUG nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Instance network_info: |[{"id": "8d92ef1c-0941-4eaa-a28f-e5cf6d76a571", "address": "fa:16:3e:df:a5:12", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d92ef1c-09", "ovs_interfaceid": "8d92ef1c-0941-4eaa-a28f-e5cf6d76a571", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 795.125874] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Acquired lock "refresh_cache-353ceb53-07e6-4e9b-bed5-ce9fca368b27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.126055] env[69927]: DEBUG nova.network.neutron [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Refreshing network info cache for port 8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.128008] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:a5:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d92ef1c-0941-4eaa-a28f-e5cf6d76a571', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 795.135251] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Creating folder: Project (babf071cfc564f7d83c28d449c774840). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 795.144112] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d677858-04b4-4f8a-9ade-4184a2962e8f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.150267] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 795.150267] env[69927]: value = "task-4095737" [ 795.150267] env[69927]: _type = "Task" [ 795.150267] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.159974] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063871} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.161187] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 795.162703] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11230ca-b4d9-42a1-88d9-6666742dd631 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.165918] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Created folder: Project (babf071cfc564f7d83c28d449c774840) in parent group-v811283. [ 795.166133] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Creating folder: Instances. Parent ref: group-v811391. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 795.169479] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6361f181-35a8-448d-b2c5-ba0542b2ab64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.171594] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095737, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.190820] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 795.192826] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe330b17-0be8-4cae-aacf-50866cda478f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.208039] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Created folder: Instances in parent group-v811391. [ 795.208263] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 795.210674] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 795.211383] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-117c109e-0ee2-483d-8005-7c4c3ce989ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.228863] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 795.228863] env[69927]: value = "task-4095740" [ 795.228863] env[69927]: _type = "Task" [ 795.228863] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.234845] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 795.234845] env[69927]: value = "task-4095741" [ 795.234845] env[69927]: _type = "Task" [ 795.234845] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.238254] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095740, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.247901] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095741, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.265155] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f612b-8f4e-6c29-58d9-bd26eb24cfdf, 'name': SearchDatastore_Task, 'duration_secs': 0.010173} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.265589] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.265911] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.266244] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.266422] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.266697] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.266975] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3749d09b-c7ab-4194-853d-211d3d730f1a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.276529] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.276726] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.277581] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-723d6c6c-c65b-459f-8cf6-01d49342f340 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.283956] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 795.283956] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525a951b-16f0-f8df-23b4-c58fb50dc1d5" [ 795.283956] env[69927]: _type = "Task" [ 795.283956] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.296064] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525a951b-16f0-f8df-23b4-c58fb50dc1d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.429960] env[69927]: DEBUG nova.network.neutron [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Updated VIF entry in instance network info cache for port 8d92ef1c-0941-4eaa-a28f-e5cf6d76a571. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 795.430439] env[69927]: DEBUG nova.network.neutron [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Updating instance_info_cache with network_info: [{"id": "8d92ef1c-0941-4eaa-a28f-e5cf6d76a571", "address": "fa:16:3e:df:a5:12", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d92ef1c-09", "ovs_interfaceid": "8d92ef1c-0941-4eaa-a28f-e5cf6d76a571", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.477934] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.478199] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.554401] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.737s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.555057] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 795.559399] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.678s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.560664] env[69927]: INFO nova.compute.claims [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.664921] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095737, 'name': CreateSnapshot_Task, 'duration_secs': 0.518276} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.665162] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 795.666290] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bceec909-4481-4bdc-a957-6a0c152030e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.739524] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095740, 'name': ReconfigVM_Task, 'duration_secs': 0.452596} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.742590] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Reconfigured VM instance instance-0000001f to attach disk [datastore1] c6a06550-33ed-4fee-bd37-3fce9c55b235/c6a06550-33ed-4fee-bd37-3fce9c55b235.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.743681] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf0b9b4e-4c9e-42dd-bcd0-cc49c400ddb8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.753485] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095741, 'name': CreateVM_Task, 'duration_secs': 0.369572} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.754761] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.755204] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 795.755204] env[69927]: value = "task-4095742" [ 795.755204] env[69927]: _type = "Task" [ 795.755204] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.756032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.756283] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.756624] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.756976] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec2d714a-67fa-440f-a439-91b5ab885a09 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.766246] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 
tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 795.766246] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5229f282-875e-17c4-d73a-b231b6f55829" [ 795.766246] env[69927]: _type = "Task" [ 795.766246] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.769378] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095742, 'name': Rename_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.778919] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5229f282-875e-17c4-d73a-b231b6f55829, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.795092] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525a951b-16f0-f8df-23b4-c58fb50dc1d5, 'name': SearchDatastore_Task, 'duration_secs': 0.011552} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.795902] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc358cbf-9ea3-4baa-8ee5-e673985857df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.801685] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 795.801685] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528dea40-3c83-86c1-9191-4abc725cb267" [ 795.801685] env[69927]: _type = "Task" [ 795.801685] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.810244] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528dea40-3c83-86c1-9191-4abc725cb267, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.933715] env[69927]: DEBUG oslo_concurrency.lockutils [req-6250cccf-15a8-4f4d-bdb5-80d9cb9a3e3e req-f01b6c7f-d0f7-48dc-b494-5a47548a7ef7 service nova] Releasing lock "refresh_cache-353ceb53-07e6-4e9b-bed5-ce9fca368b27" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.981413] env[69927]: DEBUG nova.compute.utils [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 796.071125] env[69927]: DEBUG nova.compute.utils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 796.075027] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 796.075027] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.124008] env[69927]: DEBUG nova.policy [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96e9df123ad74df4997941b3942e4330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5551a241903f4911b27b7f4ab1c2f29d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 796.188957] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 796.189415] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-aa6481a3-5e59-4bca-b739-fb9780ecdaaa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.200151] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 796.200151] env[69927]: value = "task-4095743" [ 796.200151] env[69927]: _type = "Task" [ 796.200151] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.211634] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095743, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.267208] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095742, 'name': Rename_Task, 'duration_secs': 0.143252} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.267550] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.267916] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c327c37b-3b8e-4262-8a39-d199859e3fc0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.280429] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5229f282-875e-17c4-d73a-b231b6f55829, 'name': SearchDatastore_Task, 'duration_secs': 0.011863} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.282158] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.282407] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 796.282636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.282788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.282936] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.283288] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Waiting for the task: (returnval){ [ 796.283288] env[69927]: value = "task-4095744" [ 796.283288] env[69927]: _type = "Task" [ 796.283288] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.283503] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de4f2a5d-a043-4f0b-bfc0-1de39ff51476 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.294609] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095744, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.309173] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.309549] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 796.314846] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f679c1a-fb80-42f9-bfa6-a64a44899d5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.318450] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528dea40-3c83-86c1-9191-4abc725cb267, 'name': SearchDatastore_Task, 'duration_secs': 0.010035} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.318779] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.319137] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] cde9885b-1aa8-411d-847e-087fe375002b/cde9885b-1aa8-411d-847e-087fe375002b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.319907] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c406dd3b-58e1-4668-99c3-5488b900bf62 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.324350] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 796.324350] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e2f1c8-455c-6d86-f907-3e69ec8db137" [ 796.324350] env[69927]: _type = "Task" [ 796.324350] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.330168] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 796.330168] env[69927]: value = "task-4095745" [ 796.330168] env[69927]: _type = "Task" [ 796.330168] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.337310] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e2f1c8-455c-6d86-f907-3e69ec8db137, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.342847] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.448097] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Successfully created port: 5e1c11d7-9069-43ec-8135-0682b4d7d9f9 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.484316] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.578178] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 796.714971] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095743, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.800606] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095744, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.842574] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e2f1c8-455c-6d86-f907-3e69ec8db137, 'name': SearchDatastore_Task, 'duration_secs': 0.01486} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.844018] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4291da3-93aa-409e-876a-f4b9b387bfca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.850931] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095745, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.859545] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 796.859545] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52317a89-769f-9ac7-7d10-fbcf2ae70ed4" [ 796.859545] env[69927]: _type = "Task" [ 796.859545] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.869260] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52317a89-769f-9ac7-7d10-fbcf2ae70ed4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.181748] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77402220-6199-4cde-b46c-4882cfbbb984 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.191357] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f697a7-4ae5-4869-b034-b0d4457cfa8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.225956] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50d010b-0fa2-4ab5-b8ef-6fa50787dbb8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.240975] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304be119-d7ce-4045-9b82-90386c19db0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.245830] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095743, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.257643] env[69927]: DEBUG nova.compute.provider_tree [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.301118] env[69927]: DEBUG oslo_vmware.api [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Task: {'id': task-4095744, 'name': PowerOnVM_Task, 'duration_secs': 0.542764} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.301404] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.301626] env[69927]: DEBUG nova.compute.manager [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 797.302425] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bca13fd-8f5e-4c12-a985-47600e5e866b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.346694] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587408} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.347210] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] cde9885b-1aa8-411d-847e-087fe375002b/cde9885b-1aa8-411d-847e-087fe375002b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.347439] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.347698] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf98907f-9077-42f7-81e6-5cfb503f3b49 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.356180] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 797.356180] env[69927]: value = "task-4095746" [ 797.356180] env[69927]: _type = "Task" [ 797.356180] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.373918] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095746, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.377345] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52317a89-769f-9ac7-7d10-fbcf2ae70ed4, 'name': SearchDatastore_Task, 'duration_secs': 0.028389} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.377610] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.377881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 353ceb53-07e6-4e9b-bed5-ce9fca368b27/353ceb53-07e6-4e9b-bed5-ce9fca368b27.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 797.378168] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51a0392c-d27c-4f98-b181-84708165a30e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.385319] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 797.385319] env[69927]: value = "task-4095747" [ 797.385319] env[69927]: _type = "Task" [ 797.385319] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.395023] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095747, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.572189] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.572478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.572653] env[69927]: INFO nova.compute.manager [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Attaching volume b38117da-190d-46b9-8928-95fc2ddfa1bc to /dev/sdb [ 797.591849] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 797.608360] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bc2eff-276c-4260-b006-2eb002d0a66e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.620330] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.620599] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.620760] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image limits 0:0:0 
{{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.620940] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.621100] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.621256] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.621468] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.621626] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 797.621792] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 797.621954] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 797.622163] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 797.623102] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e36734-4682-46e5-95be-f4e74b2c8e34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.626317] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1eb1aa-8b6d-44b6-b7aa-f66164a1512a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.637770] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8896b9d-f028-4037-811c-4dad3d107031 {{(pid=69927) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.645608] env[69927]: DEBUG nova.virt.block_device [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating existing volume attachment record: 12b68fd5-0e4e-45a2-93a2-54e607434450 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 797.734919] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095743, 'name': CloneVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.761176] env[69927]: DEBUG nova.scheduler.client.report [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.831702] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.870372] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067952} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.871912] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.872857] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521db5bc-dfa4-41a7-aa95-9874a2d64c8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.895138] env[69927]: DEBUG nova.compute.manager [req-25644bd5-9dd6-4460-b935-e5010b556c25 req-465f026d-5414-47ac-a78c-537fc6588cdc service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Received event network-vif-plugged-5e1c11d7-9069-43ec-8135-0682b4d7d9f9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.895582] env[69927]: DEBUG oslo_concurrency.lockutils [req-25644bd5-9dd6-4460-b935-e5010b556c25 req-465f026d-5414-47ac-a78c-537fc6588cdc service nova] Acquiring lock "0e6e60e7-d623-44da-912e-804da4d616c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.895582] env[69927]: DEBUG oslo_concurrency.lockutils [req-25644bd5-9dd6-4460-b935-e5010b556c25 req-465f026d-5414-47ac-a78c-537fc6588cdc service nova] Lock "0e6e60e7-d623-44da-912e-804da4d616c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.895744] env[69927]: DEBUG oslo_concurrency.lockutils [req-25644bd5-9dd6-4460-b935-e5010b556c25 req-465f026d-5414-47ac-a78c-537fc6588cdc service nova] Lock "0e6e60e7-d623-44da-912e-804da4d616c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.895919] env[69927]: DEBUG nova.compute.manager [req-25644bd5-9dd6-4460-b935-e5010b556c25 req-465f026d-5414-47ac-a78c-537fc6588cdc service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] No waiting events found dispatching network-vif-plugged-5e1c11d7-9069-43ec-8135-0682b4d7d9f9 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 797.896110] env[69927]: WARNING nova.compute.manager [req-25644bd5-9dd6-4460-b935-e5010b556c25 req-465f026d-5414-47ac-a78c-537fc6588cdc service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Received unexpected event network-vif-plugged-5e1c11d7-9069-43ec-8135-0682b4d7d9f9 for instance with vm_state building and task_state spawning. 
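The "Acquiring lock ... -events" / "acquired" / "released" records above come from oslo.concurrency's named locks guarding the per-instance external-event map. The following is a minimal illustrative sketch, not Nova's actual implementation: the helper name, arguments, and the plain dict of pending events are assumptions; only the lockutils.lock() context manager and the "<instance-uuid>-events" lock name are taken from the log lines themselves.

    # Sketch: pop a pending external event under the same per-instance
    # "<uuid>-events" named lock seen in the DEBUG records above.
    from oslo_concurrency import lockutils

    def pop_instance_event(pending_events, instance_uuid, event_name):
        # pending_events is assumed to be {instance_uuid: {event_name: payload}}
        with lockutils.lock('%s-events' % instance_uuid):
            return pending_events.get(instance_uuid, {}).pop(event_name, None)

When no waiter has registered for the event (as in the WARNING above, where network-vif-plugged arrived while the instance was still building/spawning), the lookup returns None and the event is simply logged as unexpected.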
[ 797.907504] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] cde9885b-1aa8-411d-847e-087fe375002b/cde9885b-1aa8-411d-847e-087fe375002b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.911915] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64ebd4d6-6d1f-41d8-bf01-64890bbcfa4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.938250] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095747, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.940169] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 797.940169] env[69927]: value = "task-4095749" [ 797.940169] env[69927]: _type = "Task" [ 797.940169] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.951356] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095749, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.999897] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Successfully updated port: 5e1c11d7-9069-43ec-8135-0682b4d7d9f9 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 798.235439] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095743, 'name': CloneVM_Task, 'duration_secs': 1.584049} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.235788] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Created linked-clone VM from snapshot [ 798.236443] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ad5741-4202-46b1-b49b-6583c61aec05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.244854] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Uploading image dbff7fb5-0787-47eb-b176-68ad992a47c2 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 798.267292] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 798.267292] env[69927]: value = "vm-811395" [ 798.267292] env[69927]: _type = "VirtualMachine" [ 798.267292] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 798.268120] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.268992] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 798.271046] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-501f35f8-f368-497a-a658-41587aaac346 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.273098] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.990s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.274563] env[69927]: INFO nova.compute.claims [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.285360] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lease: (returnval){ [ 798.285360] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0a48a-baa7-8900-a1e7-f6291fbb1fe9" [ 798.285360] env[69927]: _type = "HttpNfcLease" [ 798.285360] env[69927]: } obtained for exporting VM: (result){ [ 798.285360] env[69927]: value = "vm-811395" [ 798.285360] env[69927]: _type = "VirtualMachine" [ 798.285360] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 798.285672] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the lease: (returnval){ [ 798.285672] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0a48a-baa7-8900-a1e7-f6291fbb1fe9" [ 798.285672] env[69927]: _type = "HttpNfcLease" [ 798.285672] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 798.294313] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 798.294313] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0a48a-baa7-8900-a1e7-f6291fbb1fe9" [ 798.294313] env[69927]: _type = "HttpNfcLease" [ 798.294313] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 798.406401] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817541} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.406625] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 353ceb53-07e6-4e9b-bed5-ce9fca368b27/353ceb53-07e6-4e9b-bed5-ce9fca368b27.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 798.406845] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 798.407112] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45f21d5e-f4bd-4ab2-a055-0b09013d9aa0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.415522] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 798.415522] env[69927]: value = "task-4095753" [ 798.415522] env[69927]: _type = "Task" [ 798.415522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.425204] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095753, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.450506] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095749, 'name': ReconfigVM_Task, 'duration_secs': 0.40989} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.450732] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Reconfigured VM instance instance-00000021 to attach disk [datastore2] cde9885b-1aa8-411d-847e-087fe375002b/cde9885b-1aa8-411d-847e-087fe375002b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.451366] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92ac45e1-296b-4742-816c-52099aa6280b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.458549] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 798.458549] env[69927]: value = "task-4095754" [ 798.458549] env[69927]: _type = "Task" [ 798.458549] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.469616] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095754, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.499756] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "refresh_cache-0e6e60e7-d623-44da-912e-804da4d616c9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.499844] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "refresh_cache-0e6e60e7-d623-44da-912e-804da4d616c9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.499991] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 798.775764] env[69927]: DEBUG nova.compute.utils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 798.777267] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 798.777548] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 798.796928] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 798.796928] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0a48a-baa7-8900-a1e7-f6291fbb1fe9" [ 798.796928] env[69927]: _type = "HttpNfcLease" [ 798.796928] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 798.797258] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 798.797258] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0a48a-baa7-8900-a1e7-f6291fbb1fe9" [ 798.797258] env[69927]: _type = "HttpNfcLease" [ 798.797258] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 798.798079] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986195e4-66fb-48a5-a5dc-fda84cc91343 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.808101] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dee46c-611c-a99e-73a5-d1d9d865fc7f/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 798.808878] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dee46c-611c-a99e-73a5-d1d9d865fc7f/disk-0.vmdk for reading. 
{{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 798.868456] env[69927]: DEBUG nova.policy [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96e9df123ad74df4997941b3942e4330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5551a241903f4911b27b7f4ab1c2f29d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 798.927123] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095753, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094571} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.927434] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.928286] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0920d1-f4de-4335-aebc-e5396b3dde08 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.951063] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 353ceb53-07e6-4e9b-bed5-ce9fca368b27/353ceb53-07e6-4e9b-bed5-ce9fca368b27.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.951378] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81672567-1327-4a04-9f53-48760f05d068 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.975051] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095754, 'name': Rename_Task, 'duration_secs': 0.163285} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.976210] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.976210] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 798.976210] env[69927]: value = "task-4095755" [ 798.976210] env[69927]: _type = "Task" [ 798.976210] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.976210] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99789a26-5ff8-4904-926d-98f12bf333ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.984731] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5133f286-d54d-4a76-a33b-a2c902d507f1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.990406] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095755, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.993288] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 798.993288] env[69927]: value = "task-4095756" [ 798.993288] env[69927]: _type = "Task" [ 798.993288] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.007040] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.065190] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.072168] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "c6a06550-33ed-4fee-bd37-3fce9c55b235" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.072498] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "c6a06550-33ed-4fee-bd37-3fce9c55b235" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.072745] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "c6a06550-33ed-4fee-bd37-3fce9c55b235-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.072932] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "c6a06550-33ed-4fee-bd37-3fce9c55b235-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.073115] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "c6a06550-33ed-4fee-bd37-3fce9c55b235-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.075351] env[69927]: INFO nova.compute.manager [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Terminating instance [ 799.281467] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 799.285799] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Updating instance_info_cache with network_info: [{"id": "5e1c11d7-9069-43ec-8135-0682b4d7d9f9", "address": "fa:16:3e:e4:04:88", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e1c11d7-90", "ovs_interfaceid": "5e1c11d7-9069-43ec-8135-0682b4d7d9f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.291971] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Successfully created port: 21102681-d376-4436-a4e3-250936a48728 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.493159] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095755, 'name': ReconfigVM_Task, 'duration_secs': 0.51025} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.495430] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 353ceb53-07e6-4e9b-bed5-ce9fca368b27/353ceb53-07e6-4e9b-bed5-ce9fca368b27.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.496280] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f6501d4-d1e6-4b6e-a41f-f26068ad3888 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.511406] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095756, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.514230] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 799.514230] env[69927]: value = "task-4095757" [ 799.514230] env[69927]: _type = "Task" [ 799.514230] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.525099] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095757, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.580727] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "refresh_cache-c6a06550-33ed-4fee-bd37-3fce9c55b235" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.581583] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquired lock "refresh_cache-c6a06550-33ed-4fee-bd37-3fce9c55b235" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.581583] env[69927]: DEBUG nova.network.neutron [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.798259] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "refresh_cache-0e6e60e7-d623-44da-912e-804da4d616c9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.798850] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Instance network_info: |[{"id": "5e1c11d7-9069-43ec-8135-0682b4d7d9f9", "address": "fa:16:3e:e4:04:88", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", 
"external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e1c11d7-90", "ovs_interfaceid": "5e1c11d7-9069-43ec-8135-0682b4d7d9f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 799.800994] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:04:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a1439ce-fa5c-420d-bcf0-083f4cc002cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e1c11d7-9069-43ec-8135-0682b4d7d9f9', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.811333] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Creating folder: Project (5551a241903f4911b27b7f4ab1c2f29d). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 799.811915] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c770fd5-d500-41d3-b793-b9bcca5c41fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.826700] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Created folder: Project (5551a241903f4911b27b7f4ab1c2f29d) in parent group-v811283. [ 799.826700] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Creating folder: Instances. Parent ref: group-v811398. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 799.826996] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42cc2050-85b9-439d-98ce-fd6308097057 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.838248] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Created folder: Instances in parent group-v811398. [ 799.838514] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 799.842212] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 799.844477] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-432c4930-e38a-471f-9ceb-df0b1a0ce722 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.868035] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.868035] env[69927]: value = "task-4095760" [ 799.868035] env[69927]: _type = "Task" [ 799.868035] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.878277] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095760, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.002648] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50f8130-0cd5-47bf-a58b-7f846fc723bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.011925] env[69927]: DEBUG oslo_vmware.api [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095756, 'name': PowerOnVM_Task, 'duration_secs': 0.683445} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.014236] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 800.014490] env[69927]: INFO nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Took 9.14 seconds to spawn the instance on the hypervisor. 
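The repeated "Task: {'id': task-409575x, ...} progress is N%" entries above come from the compute driver submitting vCenter *_Task operations (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task) and then polling each task until it reaches a terminal state. The sketch below is a minimal, self-contained analogue of that polling loop, not the actual oslo.vmware wait_for_task implementation; FakeTask, its state names, and poll_interval are illustrative stand-ins.

import time


class FakeTask:
    """Stands in for a vCenter *_Task object (e.g. PowerOnVM_Task)."""

    def __init__(self, steps=4):
        self._progress = 0
        self._step = max(1, 100 // steps)

    def refresh(self):
        # The real driver refreshes TaskInfo through the PropertyCollector,
        # which is what the RetrievePropertiesEx calls in the log are doing.
        self._progress = min(100, self._progress + self._step)

    @property
    def state(self):
        return "success" if self._progress >= 100 else "running"

    @property
    def progress(self):
        return self._progress


def wait_for_task(task, poll_interval=0.1):
    """Poll a task until it finishes, logging progress like the entries above."""
    while True:
        task.refresh()
        if task.state == "success":
            print("Task completed successfully.")
            return
        if task.state == "error":
            raise RuntimeError("Task failed.")
        print(f"Task progress is {task.progress}%.")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask(steps=4))

Each spawn in the log chains several of these tasks back to back (extend the root disk, reconfigure the VM to attach it, rename, power on), which is why a single instance build produces a long run of progress lines.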
[ 800.014727] env[69927]: DEBUG nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 800.016222] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd17f01b-b2e2-4ff1-b7c4-927ed75ee4a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.019047] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638574c2-3819-45cd-9016-8d7e80e1a34d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.068091] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095757, 'name': Rename_Task, 'duration_secs': 0.189992} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.072049] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5f54e8-3d4c-496e-a6a9-ee5bf6cee102 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.072049] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 800.072366] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4aab454-f7f5-4220-89bd-006cb1db32aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.081439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d347af-0af3-4415-b199-e1a7369d116e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.090597] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 800.090597] env[69927]: value = "task-4095761" [ 800.090597] env[69927]: _type = "Task" [ 800.090597] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.104846] env[69927]: DEBUG nova.compute.provider_tree [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.113315] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095761, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.125376] env[69927]: DEBUG nova.network.neutron [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.174205] env[69927]: DEBUG nova.compute.manager [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Received event network-changed-5e1c11d7-9069-43ec-8135-0682b4d7d9f9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 800.174419] env[69927]: DEBUG nova.compute.manager [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Refreshing instance network info cache due to event network-changed-5e1c11d7-9069-43ec-8135-0682b4d7d9f9. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 800.176969] env[69927]: DEBUG oslo_concurrency.lockutils [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] Acquiring lock "refresh_cache-0e6e60e7-d623-44da-912e-804da4d616c9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.176969] env[69927]: DEBUG oslo_concurrency.lockutils [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] Acquired lock "refresh_cache-0e6e60e7-d623-44da-912e-804da4d616c9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.176969] env[69927]: DEBUG nova.network.neutron [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Refreshing network info cache for port 5e1c11d7-9069-43ec-8135-0682b4d7d9f9 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.220398] env[69927]: DEBUG nova.network.neutron [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.302525] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 800.334192] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.334624] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.334849] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.335181] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.335535] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.335881] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.336123] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.336308] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 800.336570] env[69927]: DEBUG nova.virt.hardware [None 
req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.336769] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.337149] env[69927]: DEBUG nova.virt.hardware [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.338073] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f05e62-fae3-4551-9ef9-78db35b00932 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.348042] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de37f2ef-8aae-45ea-8774-3b03180e55e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.378934] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095760, 'name': CreateVM_Task, 'duration_secs': 0.481577} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.379201] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 800.379983] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.380301] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.380692] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 800.380968] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b39ae26b-dd0d-43d6-b039-518aa6491625 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.386413] env[69927]: DEBUG oslo_vmware.api [None 
req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 800.386413] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52465ee7-dc58-236a-9a11-51ec51d442a0" [ 800.386413] env[69927]: _type = "Task" [ 800.386413] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.396393] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52465ee7-dc58-236a-9a11-51ec51d442a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.577965] env[69927]: INFO nova.compute.manager [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Took 55.25 seconds to build instance. [ 800.601771] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095761, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.627448] env[69927]: ERROR nova.scheduler.client.report [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [req-7193c26f-2d65-4ad7-9dd6-7d380d14a686] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7193c26f-2d65-4ad7-9dd6-7d380d14a686"}]} [ 800.647088] env[69927]: DEBUG nova.scheduler.client.report [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 800.663431] env[69927]: DEBUG nova.scheduler.client.report [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 800.664149] env[69927]: DEBUG nova.compute.provider_tree [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.677025] env[69927]: DEBUG nova.scheduler.client.report [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 800.702997] env[69927]: DEBUG nova.scheduler.client.report [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 800.724839] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Releasing lock "refresh_cache-c6a06550-33ed-4fee-bd37-3fce9c55b235" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.725537] env[69927]: DEBUG nova.compute.manager [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b 
tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 800.725777] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 800.726704] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8919808d-1129-49cf-89cf-38a43b186267 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.739679] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.740183] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e857713e-2614-412a-9d32-f82ff1fa758e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.752807] env[69927]: DEBUG oslo_vmware.api [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 800.752807] env[69927]: value = "task-4095763" [ 800.752807] env[69927]: _type = "Task" [ 800.752807] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.766983] env[69927]: DEBUG oslo_vmware.api [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095763, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.899049] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52465ee7-dc58-236a-9a11-51ec51d442a0, 'name': SearchDatastore_Task, 'duration_secs': 0.013408} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.901565] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.901814] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.902096] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.902245] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.902489] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.903061] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26b1d43f-2b3d-446d-81db-56d3647681db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.919265] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.919265] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 800.919265] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f393823-cc36-4fdc-8a83-b7431655e900 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.935312] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 800.935312] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241a621-20e5-bd41-7be4-ad889b9bcb1a" [ 800.935312] env[69927]: _type = "Task" [ 800.935312] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.950120] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241a621-20e5-bd41-7be4-ad889b9bcb1a, 'name': SearchDatastore_Task, 'duration_secs': 0.015094} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.951247] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f432bf2c-5668-41ed-9882-6d0769524620 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.960966] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 800.960966] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b7035d-eca7-e9c3-a7c0-fd912dbebcc1" [ 800.960966] env[69927]: _type = "Task" [ 800.960966] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.975927] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b7035d-eca7-e9c3-a7c0-fd912dbebcc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.069571] env[69927]: DEBUG nova.network.neutron [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Updated VIF entry in instance network info cache for port 5e1c11d7-9069-43ec-8135-0682b4d7d9f9. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 801.070161] env[69927]: DEBUG nova.network.neutron [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Updating instance_info_cache with network_info: [{"id": "5e1c11d7-9069-43ec-8135-0682b4d7d9f9", "address": "fa:16:3e:e4:04:88", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e1c11d7-90", "ovs_interfaceid": "5e1c11d7-9069-43ec-8135-0682b4d7d9f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.080283] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca22c73a-c84f-4777-a6be-cbc8846e7eb1 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "cde9885b-1aa8-411d-847e-087fe375002b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.170s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.109915] env[69927]: DEBUG oslo_vmware.api [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095761, 'name': PowerOnVM_Task, 'duration_secs': 0.889009} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.110701] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.111418] env[69927]: INFO nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Took 8.46 seconds to spawn the instance on the hypervisor. 
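The ERROR from nova.scheduler.client.report a little earlier (HTTP 409 with code "placement.concurrent_update") and the inventory refresh that follows it show Placement's optimistic concurrency: each inventory update carries the resource provider generation, a stale generation is rejected with a conflict, and the client re-reads the provider and retries. The snippet below is an in-memory analogue of that refresh-and-retry loop; PlacementLikeStore, ConflictError, and the method names are hypothetical stand-ins that only mimic the behaviour, not the Placement API or Nova's report client.

class ConflictError(Exception):
    pass


class PlacementLikeStore:
    """Holds one resource provider's inventory plus a generation counter."""

    def __init__(self):
        self.generation = 0
        self.inventory = {}

    def get(self):
        return self.generation, dict(self.inventory)

    def put(self, expected_generation, new_inventory):
        # Optimistic concurrency: reject stale writers, the way Placement
        # answers with HTTP 409 / placement.concurrent_update.
        if expected_generation != self.generation:
            raise ConflictError("resource provider generation conflict")
        self.inventory = dict(new_inventory)
        self.generation += 1
        return self.generation


def update_inventory_with_retry(store, desired, max_attempts=3):
    """Re-read the generation and retry when a concurrent writer wins."""
    for attempt in range(1, max_attempts + 1):
        generation, _current = store.get()
        try:
            return store.put(generation, desired)
        except ConflictError:
            print(f"attempt {attempt}: generation conflict, refreshing and retrying")
    raise RuntimeError("could not update inventory")


if __name__ == "__main__":
    store = PlacementLikeStore()
    stale_generation, _ = store.get()              # this writer read generation 0
    store.put(0, {"VCPU": {"total": 48}})          # a concurrent writer lands first
    try:
        store.put(stale_generation, {"DISK_GB": {"total": 400}})
    except ConflictError as exc:
        print(f"got 409-style conflict: {exc}")
    # Refresh the generation and retry, as the report client does above.
    print(update_inventory_with_retry(
        store, {"VCPU": {"total": 48}, "DISK_GB": {"total": 400}}))

In the log the client also refreshes aggregate and trait associations before retrying, which is visible in the _refresh_associations entries that follow the 409.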
[ 801.111815] env[69927]: DEBUG nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 801.116447] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785d6e86-1c9d-4f07-a8be-21b45342c3c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.153967] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Successfully updated port: 21102681-d376-4436-a4e3-250936a48728 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.266289] env[69927]: DEBUG oslo_vmware.api [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095763, 'name': PowerOffVM_Task, 'duration_secs': 0.162914} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.266773] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.267358] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.267656] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64f33efd-8ec6-4415-a17a-12f8a38629e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.297290] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.297694] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.297998] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Deleting the datastore file [datastore1] c6a06550-33ed-4fee-bd37-3fce9c55b235 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.298367] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38eb3d74-597d-4e01-bd97-958fe641fa79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.305565] env[69927]: DEBUG oslo_vmware.api [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for the task: (returnval){ [ 801.305565] env[69927]: value = "task-4095765" [ 801.305565] env[69927]: _type = "Task" [ 801.305565] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.317185] env[69927]: DEBUG oslo_vmware.api [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.370279] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12446137-87ba-4b33-91c9-ba52753c2305 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.379230] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152bc33a-6598-40b3-bb28-7b8af224b2c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.414131] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc89319-a4c4-48bf-9fa7-e1c8be1bcaec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.422556] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1564280b-ae59-4c5c-aedd-934ee2c28f45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.439027] env[69927]: DEBUG nova.compute.provider_tree [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.473088] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b7035d-eca7-e9c3-a7c0-fd912dbebcc1, 'name': SearchDatastore_Task, 'duration_secs': 0.015394} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.473385] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.473652] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 0e6e60e7-d623-44da-912e-804da4d616c9/0e6e60e7-d623-44da-912e-804da4d616c9.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.474063] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d4eaff3-0962-4fed-bc7c-4ae1bb128e69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.481071] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 801.481071] env[69927]: value = "task-4095766" [ 801.481071] env[69927]: _type = "Task" [ 801.481071] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.489778] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095766, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.567368] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "cde9885b-1aa8-411d-847e-087fe375002b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.567548] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "cde9885b-1aa8-411d-847e-087fe375002b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.567842] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "cde9885b-1aa8-411d-847e-087fe375002b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.568149] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "cde9885b-1aa8-411d-847e-087fe375002b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.568397] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "cde9885b-1aa8-411d-847e-087fe375002b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.570754] env[69927]: INFO nova.compute.manager [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Terminating instance [ 801.572495] env[69927]: DEBUG oslo_concurrency.lockutils [req-247079fb-a943-45fc-9586-1e0f062d274a req-4b564e56-2454-448b-82ed-ceff7561020f service nova] Releasing lock "refresh_cache-0e6e60e7-d623-44da-912e-804da4d616c9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.583613] env[69927]: DEBUG nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 801.638976] env[69927]: INFO nova.compute.manager [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Took 51.26 seconds to build instance. [ 801.660941] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "refresh_cache-915797c5-6f68-4355-a6b0-ad2b06b826cb" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.660941] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "refresh_cache-915797c5-6f68-4355-a6b0-ad2b06b826cb" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.660941] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.824289] env[69927]: DEBUG oslo_vmware.api [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Task: {'id': task-4095765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158292} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.824747] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 801.824965] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 801.825176] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.825365] env[69927]: INFO nova.compute.manager [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Took 1.10 seconds to destroy the instance on the hypervisor. 
[ 801.825701] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 801.825929] env[69927]: DEBUG nova.compute.manager [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 801.826165] env[69927]: DEBUG nova.network.neutron [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.847259] env[69927]: DEBUG nova.network.neutron [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.995311] env[69927]: DEBUG nova.scheduler.client.report [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 63 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 801.995311] env[69927]: DEBUG nova.compute.provider_tree [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 63 to 64 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 801.995311] env[69927]: DEBUG nova.compute.provider_tree [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.001791] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095766, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.075322] env[69927]: DEBUG nova.compute.manager [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 802.075530] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.076609] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c72f874-1189-46eb-84b0-9fe87f442d3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.086136] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.086411] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db7e72a1-6011-4894-bfc1-b8369f565db4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.097021] env[69927]: DEBUG oslo_vmware.api [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 802.097021] env[69927]: value = "task-4095767" [ 802.097021] env[69927]: _type = "Task" [ 802.097021] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.107574] env[69927]: DEBUG oslo_vmware.api [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095767, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.118718] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.141546] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db61551e-168a-4dfa-b3ad-5e6347b4a563 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 86.072s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.206922] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.231293] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 802.231567] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811397', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'name': 'volume-b38117da-190d-46b9-8928-95fc2ddfa1bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f6972b90-7746-4a37-8be8-1739f96dc3dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'serial': 'b38117da-190d-46b9-8928-95fc2ddfa1bc'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 802.232840] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c928fb5-ccca-45d7-ac3d-9421e8459cad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.260015] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6978e31a-ff80-4743-aebe-96679b6506b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.288295] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 
volume-b38117da-190d-46b9-8928-95fc2ddfa1bc/volume-b38117da-190d-46b9-8928-95fc2ddfa1bc.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.291519] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9746a1d8-b9d5-4c25-8555-b000cbdde060 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.312599] env[69927]: DEBUG oslo_vmware.api [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 802.312599] env[69927]: value = "task-4095768" [ 802.312599] env[69927]: _type = "Task" [ 802.312599] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.322675] env[69927]: DEBUG oslo_vmware.api [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.350344] env[69927]: DEBUG nova.network.neutron [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.398689] env[69927]: DEBUG nova.compute.manager [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Received event network-vif-plugged-21102681-d376-4436-a4e3-250936a48728 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.398901] env[69927]: DEBUG oslo_concurrency.lockutils [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] Acquiring lock "915797c5-6f68-4355-a6b0-ad2b06b826cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.399143] env[69927]: DEBUG oslo_concurrency.lockutils [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.399313] env[69927]: DEBUG oslo_concurrency.lockutils [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.399663] env[69927]: DEBUG nova.compute.manager [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] No waiting events found dispatching network-vif-plugged-21102681-d376-4436-a4e3-250936a48728 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 802.399663] env[69927]: WARNING nova.compute.manager 
[req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Received unexpected event network-vif-plugged-21102681-d376-4436-a4e3-250936a48728 for instance with vm_state building and task_state spawning. [ 802.399808] env[69927]: DEBUG nova.compute.manager [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Received event network-changed-21102681-d376-4436-a4e3-250936a48728 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.400013] env[69927]: DEBUG nova.compute.manager [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Refreshing instance network info cache due to event network-changed-21102681-d376-4436-a4e3-250936a48728. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 802.400138] env[69927]: DEBUG oslo_concurrency.lockutils [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] Acquiring lock "refresh_cache-915797c5-6f68-4355-a6b0-ad2b06b826cb" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.428441] env[69927]: DEBUG nova.network.neutron [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Updating instance_info_cache with network_info: [{"id": "21102681-d376-4436-a4e3-250936a48728", "address": "fa:16:3e:c8:f7:29", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21102681-d3", "ovs_interfaceid": "21102681-d376-4436-a4e3-250936a48728", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.495067] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613189} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.495067] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 0e6e60e7-d623-44da-912e-804da4d616c9/0e6e60e7-d623-44da-912e-804da4d616c9.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 802.495067] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 802.495067] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffef771b-9318-4d06-a20d-40bcd16df57c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.501262] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 802.501262] env[69927]: value = "task-4095769" [ 802.501262] env[69927]: _type = "Task" [ 802.501262] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.505577] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.232s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.506628] env[69927]: DEBUG nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 802.509514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 37.011s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.517637] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095769, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.608824] env[69927]: DEBUG oslo_vmware.api [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095767, 'name': PowerOffVM_Task, 'duration_secs': 0.285665} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.609523] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 802.609523] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 802.609722] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71c819c9-725c-4fb9-9013-a74462a454d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.644840] env[69927]: DEBUG nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.694181] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 802.694664] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 802.695080] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Deleting the datastore file [datastore2] cde9885b-1aa8-411d-847e-087fe375002b {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 802.695799] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f775ea1-4c57-499c-802f-937ff2a84ff0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.704034] env[69927]: DEBUG oslo_vmware.api [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for the task: (returnval){ [ 802.704034] env[69927]: value = "task-4095771" [ 802.704034] env[69927]: _type = "Task" [ 802.704034] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.713640] env[69927]: DEBUG oslo_vmware.api [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095771, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.824265] env[69927]: DEBUG oslo_vmware.api [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095768, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.853544] env[69927]: INFO nova.compute.manager [-] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Took 1.03 seconds to deallocate network for instance. [ 802.931486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "refresh_cache-915797c5-6f68-4355-a6b0-ad2b06b826cb" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.931882] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Instance network_info: |[{"id": "21102681-d376-4436-a4e3-250936a48728", "address": "fa:16:3e:c8:f7:29", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21102681-d3", "ovs_interfaceid": "21102681-d376-4436-a4e3-250936a48728", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 802.932289] env[69927]: DEBUG oslo_concurrency.lockutils [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] Acquired lock "refresh_cache-915797c5-6f68-4355-a6b0-ad2b06b826cb" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.932508] env[69927]: DEBUG nova.network.neutron [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Refreshing network info cache for port 21102681-d376-4436-a4e3-250936a48728 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 802.933767] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:c8:f7:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a1439ce-fa5c-420d-bcf0-083f4cc002cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21102681-d376-4436-a4e3-250936a48728', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.941358] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 802.945060] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.945602] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4861ab87-c44d-47fb-9a01-813e99775746 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.968874] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.968874] env[69927]: value = "task-4095772" [ 802.968874] env[69927]: _type = "Task" [ 802.968874] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.978302] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095772, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.012009] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095769, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066873} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.012361] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.013623] env[69927]: DEBUG nova.compute.utils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 803.015562] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9585eaf8-7092-4993-9963-00ae5ed874bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.018542] env[69927]: DEBUG nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.018721] env[69927]: DEBUG nova.network.neutron [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.060277] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 0e6e60e7-d623-44da-912e-804da4d616c9/0e6e60e7-d623-44da-912e-804da4d616c9.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.061759] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55003a3d-6222-470d-a9ae-ee9770cd2590 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.088087] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 803.088087] env[69927]: value = "task-4095773" [ 803.088087] env[69927]: _type = "Task" [ 803.088087] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.098902] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095773, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.104730] env[69927]: DEBUG nova.policy [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd1a4b19b9874a17bde997440649c7e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c5a402c4ef2452b9809e30a2fe91431', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.172570] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.215846] env[69927]: DEBUG oslo_vmware.api [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Task: {'id': task-4095771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23513} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.216386] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.216572] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.216797] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.216959] env[69927]: INFO nova.compute.manager [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 803.217227] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.217446] env[69927]: DEBUG nova.compute.manager [-] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.217550] env[69927]: DEBUG nova.network.neutron [-] [instance: cde9885b-1aa8-411d-847e-087fe375002b] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.312618] env[69927]: DEBUG nova.network.neutron [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Updated VIF entry in instance network info cache for port 21102681-d376-4436-a4e3-250936a48728. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 803.313093] env[69927]: DEBUG nova.network.neutron [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Updating instance_info_cache with network_info: [{"id": "21102681-d376-4436-a4e3-250936a48728", "address": "fa:16:3e:c8:f7:29", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21102681-d3", "ovs_interfaceid": "21102681-d376-4436-a4e3-250936a48728", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.328721] env[69927]: DEBUG oslo_vmware.api [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095768, 'name': ReconfigVM_Task, 'duration_secs': 0.733816} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.329817] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Reconfigured VM instance instance-0000001a to attach disk [datastore1] volume-b38117da-190d-46b9-8928-95fc2ddfa1bc/volume-b38117da-190d-46b9-8928-95fc2ddfa1bc.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.337408] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b869791-58dd-4b23-9a47-bb4eab9df06d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.361427] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.367835] env[69927]: DEBUG oslo_vmware.api [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 803.367835] env[69927]: value = "task-4095774" [ 803.367835] env[69927]: _type = "Task" [ 803.367835] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.383327] env[69927]: DEBUG oslo_vmware.api [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095774, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.485823] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095772, 'name': CreateVM_Task, 'duration_secs': 0.438045} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.486450] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.492939] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.492939] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.492939] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 803.492939] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54263bea-8a3d-4457-855e-166c4ec32c18 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.499024] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 803.499024] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5298f4e2-6a8c-00ff-aa93-d399d46ad2f5" [ 803.499024] env[69927]: _type = "Task" [ 803.499024] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.508057] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5298f4e2-6a8c-00ff-aa93-d399d46ad2f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.519272] env[69927]: DEBUG nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 803.534574] env[69927]: DEBUG nova.network.neutron [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Successfully created port: 85825835-357a-42a3-81f4-b55d7e165b65 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.587966] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7ce79e41-333a-4ef3-ba68-f74067d4ac5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.588073] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8442f144-2be4-4634-b151-62f049a975b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.588259] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.588352] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 5f67d6a0-e4b7-435e-8991-0f54e0379d22 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.588480] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 6e698775-2556-4cbe-b65f-0cc3efa7bcf6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 803.588602] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a536b069-45e0-4ffe-be53-ac33f8cb6ec0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.588733] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 77c6ce9e-5e15-41e4-aa81-1ef01248aa32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.588837] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9348e368-cc3c-4bde-91ae-26fd03ad536a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.589069] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 14359034-232d-478f-bf65-cf9937c59229 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.589113] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance f6972b90-7746-4a37-8be8-1739f96dc3dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.589192] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a4249857-6f60-4040-b676-d2d19dc83f15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.589348] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9d83dda3-5fb1-416d-9307-faeef454efec is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 803.589450] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 95c02aa2-d587-4c9f-9b02-2992dfe5b1be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.589572] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance bf4bee47-36ce-43ee-96f1-96f262882986 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 803.589683] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c6a06550-33ed-4fee-bd37-3fce9c55b235 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.589821] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c2b6b943-f6d6-427f-aba5-1d619d889325 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 803.589936] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cde9885b-1aa8-411d-847e-087fe375002b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.590180] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 353ceb53-07e6-4e9b-bed5-ce9fca368b27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.590353] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 0e6e60e7-d623-44da-912e-804da4d616c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.590528] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 915797c5-6f68-4355-a6b0-ad2b06b826cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.590659] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 256319c4-817d-4267-8531-a65f0f8cd0b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 803.608031] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095773, 'name': ReconfigVM_Task, 'duration_secs': 0.35429} completed successfully. 
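[editorial note] The resource_tracker entries above come from the periodic placement-allocation audit, which sorts every allocation naming this compute node into one of three buckets: actively managed (kept), allocated but not managed here (warned and skipped), or scheduled but not yet started (skipped, seen further below). A minimal illustrative sketch of that classification, not Nova's actual _remove_deleted_instances_allocations; `tracked`, `scheduled_only`, and `allocations` are hypothetical inputs standing in for the tracker's state:

    # Illustrative only: three-way classification mirrored from the log messages above.
    import logging

    LOG = logging.getLogger(__name__)

    def audit_allocations(tracked, scheduled_only, allocations):
        # allocations: {instance_uuid: {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, ...}
        for instance_uuid, resources in allocations.items():
            if instance_uuid in tracked:
                LOG.debug("Instance %s actively managed on this compute host and "
                          "has allocations in placement: %s.", instance_uuid, resources)
            elif instance_uuid in scheduled_only:
                LOG.debug("Instance %s has been scheduled to this compute host but "
                          "has yet to start. Skipping heal of allocation: %s.",
                          instance_uuid, resources)
            else:
                LOG.warning("Instance %s is not being actively managed by this compute "
                            "host but has allocations referencing it: %s. Skipping heal "
                            "of allocation.", instance_uuid, resources)

The {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} resource dicts correspond to the m1.nano flavor (root_gb=1, memory_mb=192, vcpus=1) shown later in this log.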
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.608816] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 0e6e60e7-d623-44da-912e-804da4d616c9/0e6e60e7-d623-44da-912e-804da4d616c9.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.612128] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5e2a5b9-a7d6-4ca3-97f8-de9c440d4aeb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.621509] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 803.621509] env[69927]: value = "task-4095775" [ 803.621509] env[69927]: _type = "Task" [ 803.621509] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.635958] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095775, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.818497] env[69927]: DEBUG oslo_concurrency.lockutils [req-7caa6049-cb1d-4474-8978-b7e7fb27cf15 req-72044354-7bb2-4825-89d8-4e218096050e service nova] Releasing lock "refresh_cache-915797c5-6f68-4355-a6b0-ad2b06b826cb" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.875374] env[69927]: DEBUG nova.compute.manager [req-52bffe2d-da6b-4b86-9843-956b674467f6 req-bb16b796-5881-457d-8416-867d58e5d75b service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Received event network-vif-deleted-6ab9e78c-be4f-450a-99bc-80d392d1c61f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.875564] env[69927]: INFO nova.compute.manager [req-52bffe2d-da6b-4b86-9843-956b674467f6 req-bb16b796-5881-457d-8416-867d58e5d75b service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Neutron deleted interface 6ab9e78c-be4f-450a-99bc-80d392d1c61f; detaching it from the instance and deleting it from the info cache [ 803.875730] env[69927]: DEBUG nova.network.neutron [req-52bffe2d-da6b-4b86-9843-956b674467f6 req-bb16b796-5881-457d-8416-867d58e5d75b service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.884113] env[69927]: DEBUG oslo_vmware.api [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095774, 'name': ReconfigVM_Task, 'duration_secs': 0.190024} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.884458] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811397', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'name': 'volume-b38117da-190d-46b9-8928-95fc2ddfa1bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f6972b90-7746-4a37-8be8-1739f96dc3dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'serial': 'b38117da-190d-46b9-8928-95fc2ddfa1bc'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 804.009343] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5298f4e2-6a8c-00ff-aa93-d399d46ad2f5, 'name': SearchDatastore_Task, 'duration_secs': 0.012875} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.009704] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.009945] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.010206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.010354] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.010530] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.010816] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-f01b4130-cc7d-4957-92a2-75edb655557e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.021276] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.021584] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 804.022441] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ffe051c-d10c-4e3f-9f31-959982815882 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.034135] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 804.034135] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522d7910-01a5-1f24-47f4-f16beb20c72e" [ 804.034135] env[69927]: _type = "Task" [ 804.034135] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.047053] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522d7910-01a5-1f24-47f4-f16beb20c72e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.095640] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 21b7b237-557e-4030-93bb-6b5ce417e53c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 804.132154] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095775, 'name': Rename_Task, 'duration_secs': 0.188316} completed successfully. 
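[editorial note] The recurring "Invoking ...Task", "Waiting for the task: (returnval){...}", "progress is N%" and "completed successfully" entries are the standard oslo.vmware submit-and-poll round trip. A minimal sketch of that pattern, assuming `session` is an already-authenticated oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference obtained elsewhere (both are assumptions, as is the placeholder name):

    # Sketch of the submit-and-poll pattern recorded in the surrounding entries.
    task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                              newName='new-display-name')  # placeholder name
    session.wait_for_task(task)  # polls the task (the "progress is N%" lines) until it succeeds or raises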
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.132488] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.132734] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6caa4a13-5c0f-49b8-ad1f-a2284f5dbfd4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.141531] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 804.141531] env[69927]: value = "task-4095776" [ 804.141531] env[69927]: _type = "Task" [ 804.141531] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.151599] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095776, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.188410] env[69927]: DEBUG nova.network.neutron [-] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.379351] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a07c6f1-7d80-4b4e-bb37-5ebc2ca9ce63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.394624] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20c7b2f-d7cf-42d5-954a-3ec9e4b1b04d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.437308] env[69927]: DEBUG nova.compute.manager [req-52bffe2d-da6b-4b86-9843-956b674467f6 req-bb16b796-5881-457d-8416-867d58e5d75b service nova] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Detach interface failed, port_id=6ab9e78c-be4f-450a-99bc-80d392d1c61f, reason: Instance cde9885b-1aa8-411d-847e-087fe375002b could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 804.532034] env[69927]: DEBUG nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 804.550348] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522d7910-01a5-1f24-47f4-f16beb20c72e, 'name': SearchDatastore_Task, 'duration_secs': 0.012389} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.551426] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f28dde1-c190-4301-aae0-5ba3bb509791 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.560725] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 804.560725] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523bb4c1-d899-8a9b-736b-9b04ad3fa1a6" [ 804.560725] env[69927]: _type = "Task" [ 804.560725] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.570699] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523bb4c1-d899-8a9b-736b-9b04ad3fa1a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.600023] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a36b06ca-77c8-4d2f-8b43-2c160fbac93f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 804.615208] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 804.615626] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.615859] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.616033] env[69927]: DEBUG 
nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.616262] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 804.616500] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 804.616810] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 804.617068] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 804.617280] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 804.617499] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 804.617638] env[69927]: DEBUG nova.virt.hardware [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 804.618601] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9244ffb-ba26-47e4-ad2a-a319f431ce34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.635245] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43255b6-e1c8-4a70-a37c-05f829ffe2c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.671599] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095776, 'name': PowerOnVM_Task} progress is 100%. 
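[editorial note] The nova.virt.hardware entries above record the CPU-topology search for the m1.nano flavor: 1 vCPU, no flavor or image limits, so the maxima default to 65536 and the only factorisation of 1 vCPU is 1 socket x 1 core x 1 thread, hence exactly one possible topology. A small illustrative enumeration (not Nova's implementation) of that search:

    # Illustrative only: enumerate socket/core/thread splits for a vCPU count,
    # mirroring the "Build topologies ... Got 1 possible topologies" entries above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topos.append((s, c, t))
        return topos

    print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology reported in the log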
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.691271] env[69927]: INFO nova.compute.manager [-] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Took 1.47 seconds to deallocate network for instance. [ 804.973843] env[69927]: DEBUG nova.objects.instance [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.072749] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523bb4c1-d899-8a9b-736b-9b04ad3fa1a6, 'name': SearchDatastore_Task, 'duration_secs': 0.014227} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.073062] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.073389] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 915797c5-6f68-4355-a6b0-ad2b06b826cb/915797c5-6f68-4355-a6b0-ad2b06b826cb.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 805.073676] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5c13773-f934-4bc3-a77d-c33128f7774b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.082240] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 805.082240] env[69927]: value = "task-4095777" [ 805.082240] env[69927]: _type = "Task" [ 805.082240] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.091579] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095777, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.100994] env[69927]: DEBUG nova.compute.manager [req-047de310-fc9c-497b-a18f-3cdca46dad07 req-77444d69-3438-40a4-bea1-06d5288ad74a service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Received event network-vif-plugged-85825835-357a-42a3-81f4-b55d7e165b65 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.101288] env[69927]: DEBUG oslo_concurrency.lockutils [req-047de310-fc9c-497b-a18f-3cdca46dad07 req-77444d69-3438-40a4-bea1-06d5288ad74a service nova] Acquiring lock "256319c4-817d-4267-8531-a65f0f8cd0b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.101565] env[69927]: DEBUG oslo_concurrency.lockutils [req-047de310-fc9c-497b-a18f-3cdca46dad07 req-77444d69-3438-40a4-bea1-06d5288ad74a service nova] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.101857] env[69927]: DEBUG oslo_concurrency.lockutils [req-047de310-fc9c-497b-a18f-3cdca46dad07 req-77444d69-3438-40a4-bea1-06d5288ad74a service nova] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.102168] env[69927]: DEBUG nova.compute.manager [req-047de310-fc9c-497b-a18f-3cdca46dad07 req-77444d69-3438-40a4-bea1-06d5288ad74a service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] No waiting events found dispatching network-vif-plugged-85825835-357a-42a3-81f4-b55d7e165b65 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 805.102376] env[69927]: WARNING nova.compute.manager [req-047de310-fc9c-497b-a18f-3cdca46dad07 req-77444d69-3438-40a4-bea1-06d5288ad74a service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Received unexpected event network-vif-plugged-85825835-357a-42a3-81f4-b55d7e165b65 for instance with vm_state building and task_state spawning. [ 805.105595] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ff227e07-8e36-48d6-a8c7-1e0087fd1faa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 805.129827] env[69927]: DEBUG nova.network.neutron [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Successfully updated port: 85825835-357a-42a3-81f4-b55d7e165b65 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.159147] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095776, 'name': PowerOnVM_Task, 'duration_secs': 0.540294} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.159495] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.159495] env[69927]: INFO nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Took 7.57 seconds to spawn the instance on the hypervisor. [ 805.159696] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 805.160815] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e5802c-5881-40f0-bf86-145202551bfa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.198089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.483049] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a2fd03f-6e54-4666-9a9f-3f5601cadcb3 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.910s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.598730] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095777, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.608609] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance e0bca101-cf8d-48e1-a331-b0018548593e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 805.641270] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "refresh_cache-256319c4-817d-4267-8531-a65f0f8cd0b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.641270] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "refresh_cache-256319c4-817d-4267-8531-a65f0f8cd0b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.641270] env[69927]: DEBUG nova.network.neutron [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.642510] env[69927]: DEBUG nova.compute.manager [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 805.645773] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d44951-ea1b-487c-9aa3-ac3f18725296 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.682203] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.682203] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.683268] env[69927]: INFO nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Took 51.46 seconds to build instance. 
[ 806.074374] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.074374] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.002s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.074374] env[69927]: DEBUG nova.compute.manager [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 806.076204] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc07ad73-dac4-4be4-8455-eef221810413 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.095136] env[69927]: DEBUG nova.compute.manager [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 806.095136] env[69927]: DEBUG nova.objects.instance [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.102794] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095777, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645324} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.103018] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 915797c5-6f68-4355-a6b0-ad2b06b826cb/915797c5-6f68-4355-a6b0-ad2b06b826cb.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 806.103253] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.103509] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3166272-e153-43d1-becf-16709c1c1a48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.112531] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 806.114151] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 806.114151] env[69927]: value = "task-4095778" [ 806.114151] env[69927]: _type = "Task" [ 806.114151] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.126573] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095778, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.163043] env[69927]: INFO nova.compute.manager [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] instance snapshotting [ 806.165447] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673378d2-7b1d-44b5-b141-6e81386d90e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.185431] env[69927]: DEBUG nova.network.neutron [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.188224] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f629472a-5a19-4185-8a4f-abf091e885f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.190934] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "0e6e60e7-d623-44da-912e-804da4d616c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.355s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.191852] env[69927]: DEBUG nova.compute.utils [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 806.356859] env[69927]: DEBUG nova.network.neutron [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Updating instance_info_cache with network_info: [{"id": "85825835-357a-42a3-81f4-b55d7e165b65", "address": "fa:16:3e:f4:23:86", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85825835-35", "ovs_interfaceid": "85825835-357a-42a3-81f4-b55d7e165b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.616703] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 80fc9add-683b-424e-9876-cdcae664e2da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 806.628921] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112302} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.630051] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.630872] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d2ca30-7c4e-4f2a-a870-dd44fe425c94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.655848] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 915797c5-6f68-4355-a6b0-ad2b06b826cb/915797c5-6f68-4355-a6b0-ad2b06b826cb.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.657059] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c6122a1-138a-4727-a659-2315819e2854 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.678497] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 806.678497] env[69927]: value = "task-4095779" [ 806.678497] env[69927]: _type = "Task" [ 806.678497] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.689316] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095779, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.696234] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.015s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.696847] env[69927]: DEBUG nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 806.703888] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 806.704211] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d9f68659-04b0-416b-8210-d1c9b10b2703 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.713719] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 806.713719] env[69927]: value = "task-4095780" [ 806.713719] env[69927]: _type = "Task" [ 806.713719] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.723951] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095780, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.860141] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "refresh_cache-256319c4-817d-4267-8531-a65f0f8cd0b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.860512] env[69927]: DEBUG nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance network_info: |[{"id": "85825835-357a-42a3-81f4-b55d7e165b65", "address": "fa:16:3e:f4:23:86", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85825835-35", "ovs_interfaceid": "85825835-357a-42a3-81f4-b55d7e165b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 806.860965] env[69927]: DEBUG 
nova.virt.vmwareapi.vmops [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:23:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c8a5d7c-ee1f-4a41-94e4-db31e85a398d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85825835-357a-42a3-81f4-b55d7e165b65', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 806.869063] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating folder: Project (1c5a402c4ef2452b9809e30a2fe91431). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 806.869770] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94c02e82-814c-4651-a566-b244805ff1da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.883017] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created folder: Project (1c5a402c4ef2452b9809e30a2fe91431) in parent group-v811283. [ 806.883387] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating folder: Instances. Parent ref: group-v811402. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 806.883794] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-464ccf2d-dd99-4f51-95a4-1e059a0ba286 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.897034] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created folder: Instances in parent group-v811402. [ 806.897449] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 806.897557] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 806.897807] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a003601-6974-4ec6-ab9b-20eef2edcb66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.918848] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 806.918848] env[69927]: value = "task-4095783" [ 806.918848] env[69927]: _type = "Task" [ 806.918848] env[69927]: } to complete. 
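[editorial note] The entries above show a Neutron network_info entry being reduced to the "Instance VIF info" dict used to build the VM. An illustrative mapping only, not the vmwareapi driver's actual code; the field choices and the vmxnet3 default are taken from the values logged above:

    # Illustrative only: derive the logged VIF info from one network_info entry.
    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        return {
            'network_name': vif['network']['bridge'],               # 'br-int' above
            'mac_address': vif['address'],                          # 'fa:16:3e:f4:23:86'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                                  # the Neutron port UUID
            'vif_model': vif_model,
        }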
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.927252] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095783, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.103154] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.103557] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35ee90e4-921d-48de-8498-28cd6f70a0a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.115020] env[69927]: DEBUG oslo_vmware.api [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 807.115020] env[69927]: value = "task-4095784" [ 807.115020] env[69927]: _type = "Task" [ 807.115020] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.119692] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8de4160d-2282-4ed3-bdf0-349445a6eab8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 807.125992] env[69927]: DEBUG oslo_vmware.api [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095784, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.193321] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095779, 'name': ReconfigVM_Task, 'duration_secs': 0.389986} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.193580] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 915797c5-6f68-4355-a6b0-ad2b06b826cb/915797c5-6f68-4355-a6b0-ad2b06b826cb.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.194644] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee4ca2bf-cce2-4dc5-b9b4-eb1975b7db45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.205079] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 807.205079] env[69927]: value = "task-4095785" [ 807.205079] env[69927]: _type = "Task" [ 807.205079] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.219492] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095785, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.226836] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.231161] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095780, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.399538] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dee46c-611c-a99e-73a5-d1d9d865fc7f/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 807.400880] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab97ba13-0571-4484-ad96-aeda6372461d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.408930] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dee46c-611c-a99e-73a5-d1d9d865fc7f/disk-0.vmdk is in state: ready. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 807.409184] env[69927]: ERROR oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dee46c-611c-a99e-73a5-d1d9d865fc7f/disk-0.vmdk due to incomplete transfer. [ 807.409489] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4a398e86-0737-4d87-94ce-a3199db63ab4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.418662] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dee46c-611c-a99e-73a5-d1d9d865fc7f/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 807.419778] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Uploaded image dbff7fb5-0787-47eb-b176-68ad992a47c2 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 807.421410] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 807.421747] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-edbf3e94-ff51-408b-b085-cd396b58bde3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.434059] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095783, 'name': CreateVM_Task, 'duration_secs': 0.427195} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.435342] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 807.435795] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 807.435795] env[69927]: value = "task-4095786" [ 807.435795] env[69927]: _type = "Task" [ 807.435795] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.436555] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.436826] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.437198] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 807.437679] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b79fe9b-8422-4ce1-b8e9-68cc78a36d0a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.448594] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 807.448594] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d6e3f4-d9a2-9445-c348-1319e5f4b247" [ 807.448594] env[69927]: _type = "Task" [ 807.448594] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.453273] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095786, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.462582] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d6e3f4-d9a2-9445-c348-1319e5f4b247, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.623684] env[69927]: DEBUG nova.compute.manager [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Received event network-changed-85825835-357a-42a3-81f4-b55d7e165b65 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 807.623942] env[69927]: DEBUG nova.compute.manager [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Refreshing instance network info cache due to event network-changed-85825835-357a-42a3-81f4-b55d7e165b65. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 807.624124] env[69927]: DEBUG oslo_concurrency.lockutils [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] Acquiring lock "refresh_cache-256319c4-817d-4267-8531-a65f0f8cd0b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.624292] env[69927]: DEBUG oslo_concurrency.lockutils [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] Acquired lock "refresh_cache-256319c4-817d-4267-8531-a65f0f8cd0b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.624441] env[69927]: DEBUG nova.network.neutron [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Refreshing network info cache for port 85825835-357a-42a3-81f4-b55d7e165b65 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.629616] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c7451ca3-f1fc-469b-b9d2-7fe24cb8949e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 807.630968] env[69927]: DEBUG oslo_vmware.api [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095784, 'name': PowerOffVM_Task, 'duration_secs': 0.273058} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.631984] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.631984] env[69927]: DEBUG nova.compute.manager [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 807.632651] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f1d967-931a-4724-ab59-6292758dd480 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.718944] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095785, 'name': Rename_Task, 'duration_secs': 0.232028} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.720100] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.722608] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59e9cdb0-cc61-46de-bbfb-e3ed6b4c9a32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.730870] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095780, 'name': CreateSnapshot_Task, 'duration_secs': 0.799969} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.732585] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 807.733143] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 807.733143] env[69927]: value = "task-4095787" [ 807.733143] env[69927]: _type = "Task" [ 807.733143] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.734348] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cd2f8e-6ed5-423f-9aee-93245ac91dd2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.761789] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095787, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.777970] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.778291] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.778564] env[69927]: INFO nova.compute.manager [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Attaching volume cd1547e6-ecf1-4990-bb85-9ad80fce4b95 to /dev/sdb [ 807.823847] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22184dc-531e-45d6-a919-4b92e127855c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.832209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e0dd93-492c-4549-8b40-61a4f86c2560 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.847950] env[69927]: DEBUG nova.virt.block_device [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Updating existing volume attachment record: 6251929e-6781-45b9-ba26-169c7f787680 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 807.948063] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095786, 'name': Destroy_Task, 'duration_secs': 0.364828} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.948465] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Destroyed the VM [ 807.948730] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 807.949258] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dec72e36-7215-4d29-885c-413c572bf019 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.958868] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 807.958868] env[69927]: value = "task-4095788" [ 807.958868] env[69927]: _type = "Task" [ 807.958868] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.968824] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d6e3f4-d9a2-9445-c348-1319e5f4b247, 'name': SearchDatastore_Task, 'duration_secs': 0.014989} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.969774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.970114] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 807.970429] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.970604] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.970892] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 807.971246] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4872b5e-cda1-43c8-945e-6996535cf640 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.977121] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095788, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.986965] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 807.987188] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 807.988016] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdeaba45-596e-4b4d-a471-ea610eb92185 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.993893] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 807.993893] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a60ce9-8450-8c89-2127-7247c3ca91f4" [ 807.993893] env[69927]: _type = "Task" [ 807.993893] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.003447] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a60ce9-8450-8c89-2127-7247c3ca91f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.138788] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 6be47dcb-ce00-4b81-9e69-35acabac046e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 808.148842] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4792c70b-e020-4d6c-bb82-27cadfa239ee tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.075s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.251726] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095787, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.266618] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 808.269528] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-abbb0042-76c8-4ccd-8046-15ba82273e9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.280022] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 808.280022] env[69927]: value = "task-4095792" [ 808.280022] env[69927]: _type = "Task" [ 808.280022] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.290879] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095792, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.435427] env[69927]: DEBUG nova.network.neutron [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Updated VIF entry in instance network info cache for port 85825835-357a-42a3-81f4-b55d7e165b65. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.436039] env[69927]: DEBUG nova.network.neutron [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Updating instance_info_cache with network_info: [{"id": "85825835-357a-42a3-81f4-b55d7e165b65", "address": "fa:16:3e:f4:23:86", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85825835-35", "ovs_interfaceid": "85825835-357a-42a3-81f4-b55d7e165b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.471303] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095788, 'name': RemoveSnapshot_Task} progress is 72%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.507048] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a60ce9-8450-8c89-2127-7247c3ca91f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013276} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.508158] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8d9853a-bf9b-48d8-9d29-500583351963 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.515523] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 808.515523] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526761fb-234e-49dc-df68-643193596042" [ 808.515523] env[69927]: _type = "Task" [ 808.515523] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.526325] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526761fb-234e-49dc-df68-643193596042, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.641644] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c3e8a429-8484-4b11-abe3-1cccf0992556 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 808.750787] env[69927]: DEBUG oslo_vmware.api [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095787, 'name': PowerOnVM_Task, 'duration_secs': 0.993419} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.751109] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 808.751705] env[69927]: INFO nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Took 8.45 seconds to spawn the instance on the hypervisor. [ 808.751705] env[69927]: DEBUG nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 808.752462] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138a98fb-1809-45f8-872c-b414002ad68f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.792670] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095792, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.939043] env[69927]: DEBUG oslo_concurrency.lockutils [req-5e1762dc-e65e-480f-88f9-acf5e74a2c45 req-4d164091-1ab4-4344-9833-eb609c79776c service nova] Releasing lock "refresh_cache-256319c4-817d-4267-8531-a65f0f8cd0b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.971065] env[69927]: DEBUG oslo_vmware.api [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095788, 'name': RemoveSnapshot_Task, 'duration_secs': 0.645689} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.972128] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 808.972128] env[69927]: INFO nova.compute.manager [None req-d6aa221b-3ddd-464a-990a-af7eb36bad4b tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Took 14.40 seconds to snapshot the instance on the hypervisor. [ 809.028170] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526761fb-234e-49dc-df68-643193596042, 'name': SearchDatastore_Task, 'duration_secs': 0.012687} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.028448] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.028726] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 809.029018] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7de0aa82-8896-4310-9ee1-a60543bd1bce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.037600] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 809.037600] env[69927]: value = "task-4095793" [ 809.037600] env[69927]: _type = "Task" [ 809.037600] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.048037] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095793, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.145681] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 66ba8086-2dd4-4d02-aac3-1bbb4a404784 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 809.275274] env[69927]: INFO nova.compute.manager [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Took 50.41 seconds to build instance. [ 809.296878] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095792, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.297815] env[69927]: DEBUG nova.objects.instance [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.550560] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506747} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.551072] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 809.551420] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 809.551617] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e139f5e-ce20-42cf-b164-b91f17164ec1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.558787] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 809.558787] env[69927]: value = "task-4095794" [ 809.558787] env[69927]: _type = "Task" [ 809.558787] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.569158] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.655968] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b67630a4-2c1a-440b-af82-80c908ffa6e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 809.686278] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "915797c5-6f68-4355-a6b0-ad2b06b826cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.782024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1c0e1eab-db14-4069-bb96-b7700759b7fd tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.909s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.782791] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.097s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.783209] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "915797c5-6f68-4355-a6b0-ad2b06b826cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.783570] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.784000] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.786778] env[69927]: INFO nova.compute.manager [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Terminating instance [ 809.806130] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095792, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.806640] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.806796] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.806965] env[69927]: DEBUG nova.network.neutron [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.807148] env[69927]: DEBUG nova.objects.instance [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'info_cache' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.910110] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "0e6e60e7-d623-44da-912e-804da4d616c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.910604] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "0e6e60e7-d623-44da-912e-804da4d616c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.911601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "0e6e60e7-d623-44da-912e-804da4d616c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.913266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "0e6e60e7-d623-44da-912e-804da4d616c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.913266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 
tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "0e6e60e7-d623-44da-912e-804da4d616c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.916321] env[69927]: INFO nova.compute.manager [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Terminating instance [ 810.070016] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.336647} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.070332] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 810.071173] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3965d35-6d15-4bb3-b228-24c9f6e5f67b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.096024] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 810.096024] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-388b719d-c586-42ed-b4a1-14ab3758bb56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.115522] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 810.115522] env[69927]: value = "task-4095795" [ 810.115522] env[69927]: _type = "Task" [ 810.115522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.130951] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095795, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.159925] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a2b1684f-82af-42fc-925e-db36f31cfe63 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 810.289998] env[69927]: DEBUG nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 810.298469] env[69927]: DEBUG nova.compute.manager [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 810.298469] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 810.298761] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfb0b49-0c98-44d1-a32c-72b1de2f3bc0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.308752] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095792, 'name': CloneVM_Task, 'duration_secs': 1.952852} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.311246] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Created linked-clone VM from snapshot [ 810.312566] env[69927]: DEBUG nova.objects.base [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 810.313891] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 810.314888] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8909f8e0-4ca2-4a47-a256-739e0090e80a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.317540] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7291c243-6d46-4d5b-af97-b5cbcb15b790 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.327795] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Uploading image be3aab07-1817-4586-96c7-733173ab206c {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 810.332622] env[69927]: DEBUG oslo_vmware.api [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 810.332622] env[69927]: value = "task-4095797" [ 810.332622] env[69927]: _type = "Task" [ 810.332622] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.343742] env[69927]: DEBUG oslo_vmware.api [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095797, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.350246] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 810.350246] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f72253c4-ede2-4b09-8e6c-562a08e4ca9c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.357196] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 810.357196] env[69927]: value = "task-4095798" [ 810.357196] env[69927]: _type = "Task" [ 810.357196] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.366886] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095798, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.422278] env[69927]: DEBUG nova.compute.manager [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 810.422278] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 810.423064] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d92fbdd-2a6f-41f9-8d73-ba31586f7faf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.431822] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 810.432155] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9be9eabb-d08c-436b-9125-24ad028f682c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.440031] env[69927]: DEBUG oslo_vmware.api [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 810.440031] env[69927]: value = "task-4095799" [ 810.440031] env[69927]: _type = "Task" [ 810.440031] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.453029] env[69927]: DEBUG oslo_vmware.api [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095799, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.630613] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095795, 'name': ReconfigVM_Task, 'duration_secs': 0.333168} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.630613] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 810.631533] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a9b058a-38e0-497c-91ba-1669f300e10a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.640959] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 810.640959] env[69927]: value = "task-4095800" [ 810.640959] env[69927]: _type = "Task" [ 810.640959] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.652175] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095800, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.665452] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 480a672c-cb48-45e3-86bd-1741957a5124 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 810.818914] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.843581] env[69927]: DEBUG oslo_vmware.api [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095797, 'name': PowerOffVM_Task, 'duration_secs': 0.343216} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.845254] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.845254] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 810.845254] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75d56ee8-6336-43ce-8214-df9c0c395f63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.867706] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095798, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.941605] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 810.941832] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 810.942113] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleting the datastore file [datastore2] 915797c5-6f68-4355-a6b0-ad2b06b826cb {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.946518] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-674a53a2-96d1-462a-b54a-3565383a5d5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.955036] env[69927]: DEBUG oslo_vmware.api [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095799, 'name': PowerOffVM_Task, 'duration_secs': 0.238327} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.956617] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.956949] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 810.957181] env[69927]: DEBUG oslo_vmware.api [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 810.957181] env[69927]: value = "task-4095802" [ 810.957181] env[69927]: _type = "Task" [ 810.957181] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.957386] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6f1755c-d76c-4181-8a34-8fe3310fc386 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.969116] env[69927]: DEBUG oslo_vmware.api [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095802, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.037821] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 811.038089] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 811.038266] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleting the datastore file [datastore1] 0e6e60e7-d623-44da-912e-804da4d616c9 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.038592] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e1fd3b5-3a47-41b1-9b18-1446f4e060a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.047259] env[69927]: DEBUG oslo_vmware.api [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 811.047259] env[69927]: value = "task-4095804" [ 811.047259] env[69927]: _type = "Task" [ 811.047259] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.061496] env[69927]: DEBUG oslo_vmware.api [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095804, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.091152] env[69927]: DEBUG nova.network.neutron [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating instance_info_cache with network_info: [{"id": "2d989e8c-d768-494a-a866-4da8ff809d05", "address": "fa:16:3e:02:b9:e7", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d989e8c-d7", "ovs_interfaceid": "2d989e8c-d768-494a-a866-4da8ff809d05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.157645] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095800, 'name': Rename_Task, 'duration_secs': 0.1617} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.157645] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 811.157761] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ee89237-22f6-43d7-8b58-fdb0263f3f6b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.168585] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cb35090d-bfd2-46df-8ee5-d9b068ba0a28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 811.170116] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 811.170116] env[69927]: value = "task-4095805" [ 811.170116] env[69927]: _type = "Task" [ 811.170116] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.183885] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.368983] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095798, 'name': Destroy_Task, 'duration_secs': 0.736688} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.369912] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Destroyed the VM [ 811.369912] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 811.369912] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d4c57eb5-d62c-4a16-bc25-842a717608cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.377417] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 811.377417] env[69927]: value = "task-4095806" [ 811.377417] env[69927]: _type = "Task" [ 811.377417] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.386890] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095806, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.471836] env[69927]: DEBUG oslo_vmware.api [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153311} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.472122] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.472311] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 811.472550] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.472673] env[69927]: INFO nova.compute.manager [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Took 1.17 seconds to destroy the instance on the hypervisor. [ 811.472907] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 811.473118] env[69927]: DEBUG nova.compute.manager [-] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 811.473214] env[69927]: DEBUG nova.network.neutron [-] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 811.558285] env[69927]: DEBUG oslo_vmware.api [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4095804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231996} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.559039] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.559039] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 811.559039] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.559226] env[69927]: INFO nova.compute.manager [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 811.559484] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 811.559868] env[69927]: DEBUG nova.compute.manager [-] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 811.559868] env[69927]: DEBUG nova.network.neutron [-] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 811.593830] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.681351] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 811.681786] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 811.682091] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 811.693893] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095805, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.888257] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095806, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.933669] env[69927]: DEBUG nova.compute.manager [req-669e3e1e-af24-480b-b6e3-b0ab1ec5d4b8 req-8472b6e6-04f0-454f-ae6d-19f353814ff8 service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Received event network-vif-deleted-21102681-d376-4436-a4e3-250936a48728 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 811.933916] env[69927]: INFO nova.compute.manager [req-669e3e1e-af24-480b-b6e3-b0ab1ec5d4b8 req-8472b6e6-04f0-454f-ae6d-19f353814ff8 service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Neutron deleted interface 21102681-d376-4436-a4e3-250936a48728; detaching it from the instance and deleting it from the info cache [ 811.934140] env[69927]: DEBUG nova.network.neutron [req-669e3e1e-af24-480b-b6e3-b0ab1ec5d4b8 req-8472b6e6-04f0-454f-ae6d-19f353814ff8 service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.059036] env[69927]: DEBUG nova.compute.manager [req-8f6013a6-114e-4b6c-bb13-49637fead881 req-101af335-5926-4e86-9614-7d4a30c2b9e4 service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Received event network-vif-deleted-5e1c11d7-9069-43ec-8135-0682b4d7d9f9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.059459] env[69927]: INFO nova.compute.manager [req-8f6013a6-114e-4b6c-bb13-49637fead881 req-101af335-5926-4e86-9614-7d4a30c2b9e4 service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Neutron deleted interface 5e1c11d7-9069-43ec-8135-0682b4d7d9f9; detaching it from the instance and deleting it from the info cache [ 812.059543] env[69927]: DEBUG nova.network.neutron [req-8f6013a6-114e-4b6c-bb13-49637fead881 req-101af335-5926-4e86-9614-7d4a30c2b9e4 service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 812.182677] env[69927]: DEBUG oslo_vmware.api [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095805, 'name': PowerOnVM_Task, 'duration_secs': 0.529172} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.182677] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 812.182839] env[69927]: INFO nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Took 7.65 seconds to spawn the instance on the hypervisor. [ 812.183150] env[69927]: DEBUG nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 812.184051] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1dc4d5c-6774-47b6-909f-4cbe4711dba5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.272391] env[69927]: DEBUG nova.network.neutron [-] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.314849] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17db11ef-c6de-49fb-b04d-a904503c530b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.323359] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66e2b83-cc56-4bda-9554-17eeb69c1375 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.356954] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102c7940-b891-4889-9df9-84d09329532b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.365850] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5baa6aa-d3d4-435c-a481-06a58171dff7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.381340] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.391144] env[69927]: DEBUG oslo_vmware.api [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095806, 'name': RemoveSnapshot_Task, 'duration_secs': 0.708645} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.391408] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 812.399358] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 812.399578] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811407', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'name': 'volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95c02aa2-d587-4c9f-9b02-2992dfe5b1be', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'serial': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 812.400414] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d942aa1-5027-42cf-9081-6d9c8402da3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.421434] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2cbc00-0ab1-41af-bc91-f659fe541dc1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.424614] env[69927]: DEBUG nova.network.neutron [-] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.450751] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95/volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} 
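
For reference, the resource-tracker records just above (the "Final resource view" and the ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726, generation 64 at this point and bumped to 65 a few records further down) correspond to a single Placement call: PUT /resource_providers/{uuid}/inventories. The sketch below is illustrative only, not Nova's own code path: the endpoint, token, and use of python-requests are assumptions (Nova's scheduler report client goes through keystoneauth), while the resource figures are copied from the inventory payload logged above.

    # Hypothetical sketch: push the inventory shown in the log above to Placement.
    # PLACEMENT_URL and TOKEN are stand-ins; a real deployment obtains both via keystoneauth.
    import requests

    PLACEMENT_URL = "http://placement.example.test/placement"   # assumed endpoint
    TOKEN = "gAAAA..."                                           # assumed auth token
    PROVIDER = "2f529b36-df5f-4b37-8103-68f74f737726"            # provider UUID from the log

    payload = {
        # Must match the provider's current generation (64 in the log); Placement
        # answers HTTP 409 if another writer has bumped it in the meantime.
        "resource_provider_generation": 64,
        "inventories": {
            "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                     "step_size": 1, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 17,
                        "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    resp = requests.put(
        f"{PLACEMENT_URL}/resource_providers/{PROVIDER}/inventories",
        json=payload,
        headers={"X-Auth-Token": TOKEN,
                 "OpenStack-API-Version": "placement 1.26"},
    )
    resp.raise_for_status()
    # On success Placement returns the stored inventories plus the new generation.
    print("new generation:", resp.json()["resource_provider_generation"])
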
[ 812.451111] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec90352e-c460-4efa-93a4-b79461b55231 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.453758] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dc2a11f-602e-4a45-9751-bad55c8c2714 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.470021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "14359034-232d-478f-bf65-cf9937c59229" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.470021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "14359034-232d-478f-bf65-cf9937c59229" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.470021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "14359034-232d-478f-bf65-cf9937c59229-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.470021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "14359034-232d-478f-bf65-cf9937c59229-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.470273] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "14359034-232d-478f-bf65-cf9937c59229-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.471101] env[69927]: INFO nova.compute.manager [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Terminating instance [ 812.480433] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f494895b-b956-4374-8263-2a538e41d88e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.491263] env[69927]: DEBUG oslo_vmware.api [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 
tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 812.491263] env[69927]: value = "task-4095807" [ 812.491263] env[69927]: _type = "Task" [ 812.491263] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.502487] env[69927]: DEBUG oslo_vmware.api [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095807, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.515392] env[69927]: DEBUG nova.compute.manager [req-669e3e1e-af24-480b-b6e3-b0ab1ec5d4b8 req-8472b6e6-04f0-454f-ae6d-19f353814ff8 service nova] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Detach interface failed, port_id=21102681-d376-4436-a4e3-250936a48728, reason: Instance 915797c5-6f68-4355-a6b0-ad2b06b826cb could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 812.567290] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57c6262d-adc8-4c47-ab36-f31afd0642f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.576781] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837cf8cc-87b2-4fbd-89a1-ec234045d51b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.609756] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 812.610138] env[69927]: DEBUG nova.compute.manager [req-8f6013a6-114e-4b6c-bb13-49637fead881 req-101af335-5926-4e86-9614-7d4a30c2b9e4 service nova] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Detach interface failed, port_id=5e1c11d7-9069-43ec-8135-0682b4d7d9f9, reason: Instance 0e6e60e7-d623-44da-912e-804da4d616c9 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 812.610532] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e61e361-3b0f-4239-bea5-59b428293bdc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.618044] env[69927]: DEBUG oslo_vmware.api [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 812.618044] env[69927]: value = "task-4095808" [ 812.618044] env[69927]: _type = "Task" [ 812.618044] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.626966] env[69927]: DEBUG oslo_vmware.api [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095808, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.708160] env[69927]: INFO nova.compute.manager [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Took 53.45 seconds to build instance. [ 812.775115] env[69927]: INFO nova.compute.manager [-] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Took 1.30 seconds to deallocate network for instance. [ 812.898747] env[69927]: WARNING nova.compute.manager [None req-aa22d8d2-57ea-4edd-9896-ce97902194f3 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Image not found during snapshot: nova.exception.ImageNotFound: Image be3aab07-1817-4586-96c7-733173ab206c could not be found. [ 812.920939] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 812.921273] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 64 to 65 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 812.921460] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.927260] env[69927]: INFO nova.compute.manager [-] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Took 1.37 seconds to deallocate network for instance. [ 812.993173] env[69927]: DEBUG nova.compute.manager [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.993413] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.994509] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a335d9-c902-487b-987b-d910e5a79041 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.007921] env[69927]: DEBUG oslo_vmware.api [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095807, 'name': ReconfigVM_Task, 'duration_secs': 0.449779} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.011491] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Reconfigured VM instance instance-0000001d to attach disk [datastore2] volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95/volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.017267] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 813.017596] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55fa33aa-c7b1-43af-8bd4-29349942c15c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.028824] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecb3863d-28b2-4333-82ce-e778634b88aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.036926] env[69927]: DEBUG oslo_vmware.api [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 813.036926] env[69927]: value = "task-4095809" [ 813.036926] env[69927]: _type = "Task" [ 813.036926] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.042512] env[69927]: DEBUG oslo_vmware.api [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 813.042512] env[69927]: value = "task-4095810" [ 813.042512] env[69927]: _type = "Task" [ 813.042512] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.053074] env[69927]: DEBUG oslo_vmware.api [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.057542] env[69927]: DEBUG oslo_vmware.api [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095810, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.130684] env[69927]: DEBUG oslo_vmware.api [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095808, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.209626] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5a92074e-bdb4-431d-a11e-a1d0c051c00e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.292s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.282189] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.407126] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.407126] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.407126] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.407126] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.407326] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.408058] env[69927]: INFO nova.compute.manager [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Terminating instance [ 813.430033] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 813.430033] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.919s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.430033] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.239s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.431497] env[69927]: INFO nova.compute.claims [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.437036] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.551218] env[69927]: DEBUG oslo_vmware.api [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095809, 'name': PowerOffVM_Task, 'duration_secs': 0.397591} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.554285] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.554285] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.554285] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e03a2f0-63e7-4537-b2a7-4aa6312e8de2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.558423] env[69927]: DEBUG oslo_vmware.api [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095810, 'name': ReconfigVM_Task, 'duration_secs': 0.199003} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.560156] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811407', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'name': 'volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95c02aa2-d587-4c9f-9b02-2992dfe5b1be', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'serial': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 813.628937] env[69927]: DEBUG oslo_vmware.api [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095808, 'name': PowerOnVM_Task, 'duration_secs': 0.524754} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.629350] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.629572] env[69927]: DEBUG nova.compute.manager [None req-4143c253-f8f5-4937-a2e8-c915a1665ac9 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 813.630544] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e9f63e-c8bc-4ce5-b20c-8d26d0802326 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.634702] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.634973] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.635210] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Deleting the datastore file [datastore2] 14359034-232d-478f-bf65-cf9937c59229 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.636099] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03d62aa5-f9b5-48c2-843f-5ea23d100bde {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.644282] env[69927]: DEBUG oslo_vmware.api [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 813.644282] env[69927]: value = "task-4095812" [ 813.644282] env[69927]: _type = "Task" [ 813.644282] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.654105] env[69927]: DEBUG oslo_vmware.api [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095812, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.712717] env[69927]: DEBUG nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 813.916948] env[69927]: DEBUG nova.compute.manager [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 813.917230] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.918111] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b77085-8bc4-4826-b2c5-33e5d97aeb12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.927438] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 813.927815] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dbc5d59-8e95-461d-8d56-d79a35806c4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.935953] env[69927]: DEBUG oslo_vmware.api [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 813.935953] env[69927]: value = "task-4095813" [ 813.935953] env[69927]: _type = "Task" [ 813.935953] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.949029] env[69927]: DEBUG oslo_vmware.api [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.161876] env[69927]: DEBUG oslo_vmware.api [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095812, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250347} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.162338] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.162671] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.162944] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.163265] env[69927]: INFO nova.compute.manager [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: 14359034-232d-478f-bf65-cf9937c59229] Took 1.17 seconds to destroy the instance on the hypervisor. [ 814.163655] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.163944] env[69927]: DEBUG nova.compute.manager [-] [instance: 14359034-232d-478f-bf65-cf9937c59229] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.164124] env[69927]: DEBUG nova.network.neutron [-] [instance: 14359034-232d-478f-bf65-cf9937c59229] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.241632] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.449773] env[69927]: DEBUG oslo_vmware.api [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095813, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.564088] env[69927]: DEBUG nova.compute.manager [req-e1c15623-8ac3-4348-84b7-979740001f95 req-cef925a1-8e0c-4941-b272-0da148b92304 service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Received event network-vif-deleted-0d02a2be-1a9c-48c3-93c8-28b312303384 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.564288] env[69927]: INFO nova.compute.manager [req-e1c15623-8ac3-4348-84b7-979740001f95 req-cef925a1-8e0c-4941-b272-0da148b92304 service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Neutron deleted interface 0d02a2be-1a9c-48c3-93c8-28b312303384; detaching it from the instance and deleting it from the info cache [ 814.564632] env[69927]: DEBUG nova.network.neutron [req-e1c15623-8ac3-4348-84b7-979740001f95 req-cef925a1-8e0c-4941-b272-0da148b92304 service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.603089] env[69927]: DEBUG nova.objects.instance [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lazy-loading 'flavor' on Instance uuid 95c02aa2-d587-4c9f-9b02-2992dfe5b1be {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 814.952211] env[69927]: DEBUG oslo_vmware.api [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095813, 'name': PowerOffVM_Task, 'duration_secs': 0.525818} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.952790] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 814.952978] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 814.953368] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8edd9821-064e-474e-9a7a-9f81ba9c02ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.997945] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424dcc30-045b-453a-83d6-703def33df9d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.006536] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4faa99a5-2da5-4b1d-beba-d2d8652fd6ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.047550] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407afb9a-81e7-4b95-8722-92d8f39ed834 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.050393] env[69927]: DEBUG nova.network.neutron [-] [instance: 14359034-232d-478f-bf65-cf9937c59229] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.051786] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.051952] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.052153] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleting the datastore file [datastore1] 353ceb53-07e6-4e9b-bed5-ce9fca368b27 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.052645] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd6c1ed9-a677-4c83-8880-4eed8b19886e {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.062563] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9950232c-3bfe-4479-90d4-e2fdb9330178 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.066950] env[69927]: DEBUG oslo_vmware.api [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 815.066950] env[69927]: value = "task-4095815" [ 815.066950] env[69927]: _type = "Task" [ 815.066950] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.068283] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-237573a8-2e32-4547-b8b8-75e4f4169563 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.082336] env[69927]: DEBUG nova.compute.provider_tree [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.090717] env[69927]: DEBUG oslo_vmware.api [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.096060] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca1a77c-b17f-4d31-9183-26d75f8574f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.111600] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0337e1d-ceea-4d6e-91e0-7866741f6587 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.333s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.129786] env[69927]: DEBUG nova.compute.manager [req-e1c15623-8ac3-4348-84b7-979740001f95 req-cef925a1-8e0c-4941-b272-0da148b92304 service nova] [instance: 14359034-232d-478f-bf65-cf9937c59229] Detach interface failed, port_id=0d02a2be-1a9c-48c3-93c8-28b312303384, reason: Instance 14359034-232d-478f-bf65-cf9937c59229 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 815.554444] env[69927]: INFO nova.compute.manager [-] [instance: 14359034-232d-478f-bf65-cf9937c59229] Took 1.39 seconds to deallocate network for instance. [ 815.578688] env[69927]: DEBUG oslo_vmware.api [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4095815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31411} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.578881] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.579131] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.579328] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.579557] env[69927]: INFO nova.compute.manager [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Took 1.66 seconds to destroy the instance on the hypervisor. [ 815.580111] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 815.580111] env[69927]: DEBUG nova.compute.manager [-] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 815.580264] env[69927]: DEBUG nova.network.neutron [-] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 815.586278] env[69927]: DEBUG nova.scheduler.client.report [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.634726] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.635090] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.060779] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.091892] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.092512] env[69927]: DEBUG nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.095225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.419s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.097252] env[69927]: INFO nova.compute.claims [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.138299] env[69927]: INFO nova.compute.manager [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Detaching volume cd1547e6-ecf1-4990-bb85-9ad80fce4b95 [ 816.179621] env[69927]: INFO nova.virt.block_device [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Attempting to driver detach volume cd1547e6-ecf1-4990-bb85-9ad80fce4b95 from mountpoint /dev/sdb [ 816.179860] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 816.180073] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811407', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'name': 'volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95c02aa2-d587-4c9f-9b02-2992dfe5b1be', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'serial': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 816.181038] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc7d3f1-c048-4df4-b7b6-cb9f07fb392e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.203561] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e144f7f7-7d71-4e6d-b481-1a26dd54a9b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.212352] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84aa44ed-9a25-4ecd-8e85-6bbcbe3008ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.236701] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cb39e6-81f8-4418-bfa6-267fdd4ce525 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.254440] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] The volume has not been displaced from its original location: [datastore2] volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95/volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 816.263114] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Reconfiguring VM instance instance-0000001d to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 816.263114] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-165652b3-30d2-4af8-a01e-7ed269e2c86e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.281062] env[69927]: DEBUG oslo_vmware.api [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 816.281062] env[69927]: value = "task-4095816" [ 816.281062] env[69927]: _type = "Task" [ 816.281062] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.290815] env[69927]: DEBUG oslo_vmware.api [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095816, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.375779] env[69927]: DEBUG nova.network.neutron [-] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.597093] env[69927]: DEBUG nova.compute.manager [req-6ac40299-b10a-499a-8ea3-094895942cb7 req-01145486-08dd-4e13-b406-06bb9ce1a1a5 service nova] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Received event network-vif-deleted-8d92ef1c-0941-4eaa-a28f-e5cf6d76a571 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.602945] env[69927]: DEBUG nova.compute.utils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 816.605049] env[69927]: DEBUG nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 816.605284] env[69927]: DEBUG nova.network.neutron [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.664718] env[69927]: DEBUG nova.policy [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd1a4b19b9874a17bde997440649c7e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c5a402c4ef2452b9809e30a2fe91431', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 816.792903] env[69927]: DEBUG oslo_vmware.api [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095816, 'name': ReconfigVM_Task, 'duration_secs': 0.302484} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.793578] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Reconfigured VM instance instance-0000001d to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 816.799837] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1495071-c70e-40e4-9429-ea82b3f054e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.821131] env[69927]: DEBUG oslo_vmware.api [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 816.821131] env[69927]: value = "task-4095817" [ 816.821131] env[69927]: _type = "Task" [ 816.821131] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.830530] env[69927]: DEBUG oslo_vmware.api [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095817, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.878629] env[69927]: INFO nova.compute.manager [-] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Took 1.30 seconds to deallocate network for instance. 
[ 817.093071] env[69927]: DEBUG nova.network.neutron [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Successfully created port: 6337fd25-9a6d-4947-a0dd-1a56aac7beb3 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.110294] env[69927]: DEBUG nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.333230] env[69927]: DEBUG oslo_vmware.api [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4095817, 'name': ReconfigVM_Task, 'duration_secs': 0.168628} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.336133] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811407', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'name': 'volume-cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95c02aa2-d587-4c9f-9b02-2992dfe5b1be', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95', 'serial': 'cd1547e6-ecf1-4990-bb85-9ad80fce4b95'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 817.386341] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.682204] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb57562-3e7b-4e30-b371-5e3869ffe492 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.691147] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ac6db4-cb85-48d1-8289-b0ca142fa8e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.724247] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966e66f5-64a3-4cd8-b5d9-4f81ab856bda {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.732340] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38e97d4-747b-4def-a40d-bd7fd82ee2f8 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.746462] env[69927]: DEBUG nova.compute.provider_tree [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.893988] env[69927]: DEBUG nova.objects.instance [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lazy-loading 'flavor' on Instance uuid 95c02aa2-d587-4c9f-9b02-2992dfe5b1be {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.116894] env[69927]: DEBUG nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.145282] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.145540] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.145721] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.146283] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.146283] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.146283] env[69927]: DEBUG nova.virt.hardware [None 
req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.146617] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.146617] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.146716] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.147274] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.147274] env[69927]: DEBUG nova.virt.hardware [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.147993] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9742ba8-f13d-4912-9379-b170cfdef327 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.158065] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b10117-c996-44df-bbab-b7d1d3350dde {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.249423] env[69927]: DEBUG nova.scheduler.client.report [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.754420] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 
tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.755070] env[69927]: DEBUG nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.761045] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.997s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.761045] env[69927]: INFO nova.compute.claims [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.900536] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e8cf1ae5-57e6-4468-a861-d2c5897fa56a tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.265s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.017704] env[69927]: DEBUG nova.compute.manager [req-078c6dac-be58-4ff0-ac2b-8ad6b1684e64 req-3fbb02e1-eada-4a40-bb85-d60300609d33 service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Received event network-vif-plugged-6337fd25-9a6d-4947-a0dd-1a56aac7beb3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 819.018148] env[69927]: DEBUG oslo_concurrency.lockutils [req-078c6dac-be58-4ff0-ac2b-8ad6b1684e64 req-3fbb02e1-eada-4a40-bb85-d60300609d33 service nova] Acquiring lock "21b7b237-557e-4030-93bb-6b5ce417e53c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.018270] env[69927]: DEBUG oslo_concurrency.lockutils [req-078c6dac-be58-4ff0-ac2b-8ad6b1684e64 req-3fbb02e1-eada-4a40-bb85-d60300609d33 service nova] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.018457] env[69927]: DEBUG oslo_concurrency.lockutils [req-078c6dac-be58-4ff0-ac2b-8ad6b1684e64 req-3fbb02e1-eada-4a40-bb85-d60300609d33 service nova] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.018706] env[69927]: DEBUG nova.compute.manager 
[req-078c6dac-be58-4ff0-ac2b-8ad6b1684e64 req-3fbb02e1-eada-4a40-bb85-d60300609d33 service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] No waiting events found dispatching network-vif-plugged-6337fd25-9a6d-4947-a0dd-1a56aac7beb3 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 819.018964] env[69927]: WARNING nova.compute.manager [req-078c6dac-be58-4ff0-ac2b-8ad6b1684e64 req-3fbb02e1-eada-4a40-bb85-d60300609d33 service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Received unexpected event network-vif-plugged-6337fd25-9a6d-4947-a0dd-1a56aac7beb3 for instance with vm_state building and task_state spawning. [ 819.041398] env[69927]: DEBUG nova.network.neutron [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Successfully updated port: 6337fd25-9a6d-4947-a0dd-1a56aac7beb3 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.265961] env[69927]: DEBUG nova.compute.utils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.270068] env[69927]: DEBUG nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.270690] env[69927]: DEBUG nova.network.neutron [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.335084] env[69927]: DEBUG nova.policy [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed22a7ac85664bd8b86c8a30c8d51910', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd28bd5c5feee4a39b76694d57eb3aaf0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.544187] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "refresh_cache-21b7b237-557e-4030-93bb-6b5ce417e53c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.544187] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "refresh_cache-21b7b237-557e-4030-93bb-6b5ce417e53c" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.544187] env[69927]: DEBUG nova.network.neutron [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.771398] env[69927]: DEBUG nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 819.866221] env[69927]: DEBUG nova.network.neutron [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Successfully created port: 9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.888664] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.888850] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.086255] env[69927]: DEBUG nova.network.neutron [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.237038] env[69927]: DEBUG nova.network.neutron [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Updating instance_info_cache with network_info: [{"id": "6337fd25-9a6d-4947-a0dd-1a56aac7beb3", "address": "fa:16:3e:c2:7f:9f", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6337fd25-9a", "ovs_interfaceid": "6337fd25-9a6d-4947-a0dd-1a56aac7beb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.356422] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d6f29f-9eee-4db6-82d5-2a44ee281fea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.365171] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cc2311-7f93-4045-ab76-c2b12b74fb72 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.395895] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ac95ea-8431-44c3-8735-310db77a6baa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.404881] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3989e609-6148-49c6-9432-6131200529d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.421439] env[69927]: DEBUG nova.compute.provider_tree [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.739676] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "refresh_cache-21b7b237-557e-4030-93bb-6b5ce417e53c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.740069] env[69927]: DEBUG nova.compute.manager [None 
req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Instance network_info: |[{"id": "6337fd25-9a6d-4947-a0dd-1a56aac7beb3", "address": "fa:16:3e:c2:7f:9f", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6337fd25-9a", "ovs_interfaceid": "6337fd25-9a6d-4947-a0dd-1a56aac7beb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.740571] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:7f:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c8a5d7c-ee1f-4a41-94e4-db31e85a398d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6337fd25-9a6d-4947-a0dd-1a56aac7beb3', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.748782] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.749037] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.750088] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c9bf342-df81-474f-9cc5-4c7f267a63f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.769959] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.769959] env[69927]: value = "task-4095818" [ 820.769959] env[69927]: _type = "Task" [ 820.769959] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.779640] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095818, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.783387] env[69927]: DEBUG nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.818083] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.818331] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.818488] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.818701] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.818880] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.819220] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.819475] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.821545] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.821545] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.821545] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.821545] env[69927]: DEBUG nova.virt.hardware [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.821545] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a68eee-4bb0-4a3f-8a01-612a4f7075f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.831535] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633d7683-1d4e-4a40-8154-39bc9d7f218c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.925631] env[69927]: DEBUG nova.scheduler.client.report [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.054505] env[69927]: DEBUG nova.compute.manager [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Received event network-changed-6337fd25-9a6d-4947-a0dd-1a56aac7beb3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.055287] env[69927]: DEBUG nova.compute.manager [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Refreshing instance network info cache due to event network-changed-6337fd25-9a6d-4947-a0dd-1a56aac7beb3. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 821.055287] env[69927]: DEBUG oslo_concurrency.lockutils [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] Acquiring lock "refresh_cache-21b7b237-557e-4030-93bb-6b5ce417e53c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.055287] env[69927]: DEBUG oslo_concurrency.lockutils [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] Acquired lock "refresh_cache-21b7b237-557e-4030-93bb-6b5ce417e53c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.055468] env[69927]: DEBUG nova.network.neutron [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Refreshing network info cache for port 6337fd25-9a6d-4947-a0dd-1a56aac7beb3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.281491] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095818, 'name': CreateVM_Task, 'duration_secs': 0.451258} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.281771] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.282682] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.282883] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.283282] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 821.283581] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6df35ea2-7630-4273-9b16-9ede9f65d3e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.289863] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 821.289863] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52998f90-4f1f-2ae2-0779-bec890e2ff74" [ 821.289863] env[69927]: _type = "Task" [ 821.289863] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.299765] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52998f90-4f1f-2ae2-0779-bec890e2ff74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.430327] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.430882] env[69927]: DEBUG nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 821.434183] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.463s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.435114] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.437248] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 43.435s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.437502] env[69927]: DEBUG nova.objects.instance [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 821.472895] env[69927]: INFO nova.scheduler.client.report [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Deleted allocations for instance 6e698775-2556-4cbe-b65f-0cc3efa7bcf6 [ 821.637575] env[69927]: DEBUG nova.network.neutron [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] 
[instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Successfully updated port: 9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.786896] env[69927]: DEBUG nova.network.neutron [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Updated VIF entry in instance network info cache for port 6337fd25-9a6d-4947-a0dd-1a56aac7beb3. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.788182] env[69927]: DEBUG nova.network.neutron [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Updating instance_info_cache with network_info: [{"id": "6337fd25-9a6d-4947-a0dd-1a56aac7beb3", "address": "fa:16:3e:c2:7f:9f", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6337fd25-9a", "ovs_interfaceid": "6337fd25-9a6d-4947-a0dd-1a56aac7beb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.800185] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52998f90-4f1f-2ae2-0779-bec890e2ff74, 'name': SearchDatastore_Task, 'duration_secs': 0.01415} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.801066] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.801306] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.801536] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.801683] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.801862] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.802370] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffb964cd-705c-487c-870b-db3e3f393d5a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.814819] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.815022] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.815798] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52192fe1-6abf-4847-99b3-dab05280f9dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.821924] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 821.821924] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248b806-bda3-0912-c3c7-ae9034e2aa13" [ 821.821924] env[69927]: _type = "Task" [ 821.821924] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.829926] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248b806-bda3-0912-c3c7-ae9034e2aa13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.942972] env[69927]: DEBUG nova.compute.utils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.948161] env[69927]: DEBUG nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 821.948354] env[69927]: DEBUG nova.network.neutron [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.983888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9e9a2ba-793b-4411-b235-fe6c43985395 tempest-ServersTestManualDisk-1798148514 tempest-ServersTestManualDisk-1798148514-project-member] Lock "6e698775-2556-4cbe-b65f-0cc3efa7bcf6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.870s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.987116] env[69927]: DEBUG nova.policy [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 822.141329] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.141468] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.141613] env[69927]: DEBUG nova.network.neutron [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.290582] env[69927]: DEBUG oslo_concurrency.lockutils [req-5e530e80-f164-4721-a7aa-fc437d8fce19 req-415ca3f8-67a8-489b-b3b5-aa09f72698ba service nova] Releasing lock "refresh_cache-21b7b237-557e-4030-93bb-6b5ce417e53c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.334868] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248b806-bda3-0912-c3c7-ae9034e2aa13, 'name': SearchDatastore_Task, 'duration_secs': 0.010729} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.335794] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58e87fed-0824-45ef-a76d-ff18da3f87f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.342391] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 822.342391] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525634a6-3796-f0d7-8cc0-b88679639890" [ 822.342391] env[69927]: _type = "Task" [ 822.342391] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.355023] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525634a6-3796-f0d7-8cc0-b88679639890, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.355717] env[69927]: DEBUG nova.network.neutron [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Successfully created port: ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 822.449634] env[69927]: DEBUG nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 822.453365] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c8f018c1-83e8-440a-a4b9-d9a808802c51 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.454709] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.506s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.456605] env[69927]: INFO nova.compute.claims [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.703656] env[69927]: DEBUG nova.network.neutron [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.854350] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525634a6-3796-f0d7-8cc0-b88679639890, 'name': SearchDatastore_Task, 'duration_secs': 0.011026} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.854640] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.854936] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 21b7b237-557e-4030-93bb-6b5ce417e53c/21b7b237-557e-4030-93bb-6b5ce417e53c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.855243] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba3e58bb-918b-4148-9dca-9f968d7c45ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.863184] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 822.863184] env[69927]: value = "task-4095819" [ 822.863184] env[69927]: _type = "Task" [ 822.863184] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.872871] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095819, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.107517] env[69927]: DEBUG nova.network.neutron [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [{"id": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "address": "fa:16:3e:f9:89:59", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db3f0b7-d3", "ovs_interfaceid": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.376582] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095819, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.421392] env[69927]: DEBUG nova.compute.manager [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Received event network-vif-plugged-9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.421601] env[69927]: DEBUG oslo_concurrency.lockutils [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] Acquiring lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.421814] env[69927]: DEBUG oslo_concurrency.lockutils [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.422167] env[69927]: DEBUG oslo_concurrency.lockutils [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.422498] env[69927]: DEBUG nova.compute.manager [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] No waiting events found dispatching network-vif-plugged-9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 823.422778] env[69927]: WARNING nova.compute.manager [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Received unexpected event network-vif-plugged-9db3f0b7-d302-466d-93ae-16dcd9a9f682 for instance with vm_state building and task_state spawning. [ 823.423100] env[69927]: DEBUG nova.compute.manager [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Received event network-changed-9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.423287] env[69927]: DEBUG nova.compute.manager [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Refreshing instance network info cache due to event network-changed-9db3f0b7-d302-466d-93ae-16dcd9a9f682. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 823.423486] env[69927]: DEBUG oslo_concurrency.lockutils [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] Acquiring lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.468261] env[69927]: DEBUG nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 823.510447] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 823.510706] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 823.510868] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 823.511545] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 823.511778] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 823.511939] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 823.512173] env[69927]: DEBUG nova.virt.hardware [None 
req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 823.512338] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 823.512549] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 823.512770] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 823.512955] env[69927]: DEBUG nova.virt.hardware [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 823.513860] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f852a49-6555-4936-95f2-30e064e1a529 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.526218] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0bded2-f178-496d-a411-524ba94e5c32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.610160] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Releasing lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.610896] env[69927]: DEBUG nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance network_info: |[{"id": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "address": "fa:16:3e:f9:89:59", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db3f0b7-d3", "ovs_interfaceid": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.614455] env[69927]: DEBUG oslo_concurrency.lockutils [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] Acquired lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.614749] env[69927]: DEBUG nova.network.neutron [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Refreshing network info cache for port 9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.619097] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:89:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9db3f0b7-d302-466d-93ae-16dcd9a9f682', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.624861] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.628126] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.628653] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45aad6d8-7032-473c-aece-0da8820c1e4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.656044] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.656044] env[69927]: value = "task-4095820" [ 823.656044] env[69927]: _type = "Task" [ 823.656044] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.662758] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095820, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.874125] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537905} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.874125] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 21b7b237-557e-4030-93bb-6b5ce417e53c/21b7b237-557e-4030-93bb-6b5ce417e53c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.874780] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.878180] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3474a8e-3a64-4cc6-9210-fb1bbefc67e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.883330] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 823.883330] env[69927]: value = "task-4095821" [ 823.883330] env[69927]: _type = "Task" [ 823.883330] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.895474] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095821, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.969326] env[69927]: DEBUG nova.compute.manager [req-88f9d5f1-e42c-4503-81d6-47303f1a7d54 req-e0319d05-63b0-4ff5-afb2-18dd25cbb782 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received event network-vif-plugged-ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.969532] env[69927]: DEBUG oslo_concurrency.lockutils [req-88f9d5f1-e42c-4503-81d6-47303f1a7d54 req-e0319d05-63b0-4ff5-afb2-18dd25cbb782 service nova] Acquiring lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.969858] env[69927]: DEBUG oslo_concurrency.lockutils [req-88f9d5f1-e42c-4503-81d6-47303f1a7d54 req-e0319d05-63b0-4ff5-afb2-18dd25cbb782 service nova] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.969943] env[69927]: DEBUG oslo_concurrency.lockutils [req-88f9d5f1-e42c-4503-81d6-47303f1a7d54 req-e0319d05-63b0-4ff5-afb2-18dd25cbb782 service nova] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.970158] env[69927]: DEBUG nova.compute.manager [req-88f9d5f1-e42c-4503-81d6-47303f1a7d54 req-e0319d05-63b0-4ff5-afb2-18dd25cbb782 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] No waiting events found dispatching network-vif-plugged-ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 823.970332] env[69927]: WARNING nova.compute.manager [req-88f9d5f1-e42c-4503-81d6-47303f1a7d54 req-e0319d05-63b0-4ff5-afb2-18dd25cbb782 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received unexpected event network-vif-plugged-ef273cd9-400b-409d-b689-297040f2ca04 for instance with vm_state building and task_state spawning. [ 823.974524] env[69927]: DEBUG nova.network.neutron [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updated VIF entry in instance network info cache for port 9db3f0b7-d302-466d-93ae-16dcd9a9f682. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 823.974976] env[69927]: DEBUG nova.network.neutron [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [{"id": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "address": "fa:16:3e:f9:89:59", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db3f0b7-d3", "ovs_interfaceid": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.092401] env[69927]: DEBUG nova.network.neutron [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Successfully updated port: ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 824.131398] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28607e38-6bcc-4a7b-a1cd-497ebf7f5e00 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.138830] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c7ac0b-26f6-4581-9c33-f9d699cd7865 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.174100] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d83f96-8d8e-464c-8b6b-6a38e41893e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.182345] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095820, 'name': CreateVM_Task, 'duration_secs': 0.393069} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.184322] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 824.185105] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.185273] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.185585] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 824.186771] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c504059-603b-4cd6-971e-edaece4a8b44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.190393] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afc7ab2f-b0c1-482a-8653-bb788ce0b03e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.204186] env[69927]: DEBUG nova.compute.provider_tree [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.205734] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 824.205734] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a5640a-ddcd-72eb-c96a-b289a9e2f3cf" [ 824.205734] env[69927]: _type = "Task" [ 824.205734] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.215706] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a5640a-ddcd-72eb-c96a-b289a9e2f3cf, 'name': SearchDatastore_Task, 'duration_secs': 0.011292} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.217015] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.217343] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.217598] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.217746] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.217923] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.218665] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c18691a-9128-46a5-9040-400ac59586b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.228933] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.229149] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.229896] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9242153e-a671-47f3-9a45-90d812b00545 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.236185] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 824.236185] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5239c673-ebba-9d91-7510-d2115cabb451" [ 824.236185] env[69927]: _type = "Task" [ 824.236185] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.244501] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5239c673-ebba-9d91-7510-d2115cabb451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.395895] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095821, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078561} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.396245] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.398207] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4588096-a256-4560-8a77-8047a5bfc82d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.421331] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 21b7b237-557e-4030-93bb-6b5ce417e53c/21b7b237-557e-4030-93bb-6b5ce417e53c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.421918] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efba4eab-a64a-4027-996f-ae9b7d079433 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.445163] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 824.445163] env[69927]: value = "task-4095822" [ 824.445163] env[69927]: _type = "Task" [ 824.445163] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.454025] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095822, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.481044] env[69927]: DEBUG oslo_concurrency.lockutils [req-a4b6ec13-cc62-43d0-938a-10c9a468878d req-493d725a-6384-478b-86ef-804ea4b21bba service nova] Releasing lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.594702] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.594903] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.595071] env[69927]: DEBUG nova.network.neutron [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.710962] env[69927]: DEBUG nova.scheduler.client.report [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.749074] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5239c673-ebba-9d91-7510-d2115cabb451, 'name': SearchDatastore_Task, 'duration_secs': 0.009264} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.749734] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d4967f8-5930-4e6b-91ce-88c91737e8e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.756440] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 824.756440] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52112086-c0ad-878b-adf5-4304549fb804" [ 824.756440] env[69927]: _type = "Task" [ 824.756440] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.765544] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52112086-c0ad-878b-adf5-4304549fb804, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.957699] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095822, 'name': ReconfigVM_Task, 'duration_secs': 0.292946} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.958217] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 21b7b237-557e-4030-93bb-6b5ce417e53c/21b7b237-557e-4030-93bb-6b5ce417e53c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.959016] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a85870df-386a-41c3-ab35-331d1ef9024f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.970280] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 824.970280] env[69927]: value = "task-4095823" [ 824.970280] env[69927]: _type = "Task" [ 824.970280] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.979402] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095823, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.139424] env[69927]: DEBUG nova.network.neutron [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.218248] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.218992] env[69927]: DEBUG nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 825.221740] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.209s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.222996] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.225283] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 40.389s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.225788] env[69927]: DEBUG nova.objects.instance [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 825.258912] env[69927]: INFO nova.scheduler.client.report [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted allocations for instance 9d83dda3-5fb1-416d-9307-faeef454efec [ 825.276037] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': 
session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52112086-c0ad-878b-adf5-4304549fb804, 'name': SearchDatastore_Task, 'duration_secs': 0.01032} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.276324] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.276580] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] a36b06ca-77c8-4d2f-8b43-2c160fbac93f/a36b06ca-77c8-4d2f-8b43-2c160fbac93f.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.277030] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cb53a7d-56d4-403f-aca9-a2aa2a6ea73b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.284690] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 825.284690] env[69927]: value = "task-4095824" [ 825.284690] env[69927]: _type = "Task" [ 825.284690] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.296364] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095824, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.314777] env[69927]: DEBUG nova.network.neutron [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.481548] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095823, 'name': Rename_Task, 'duration_secs': 0.149457} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.481973] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.482085] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baa0578d-dd9c-4bc9-ba05-0d7da9fea447 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.491424] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 825.491424] env[69927]: value = "task-4095825" [ 825.491424] env[69927]: _type = "Task" [ 825.491424] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.501873] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095825, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.733184] env[69927]: DEBUG nova.compute.utils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 825.740175] env[69927]: DEBUG nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 825.741336] env[69927]: DEBUG nova.network.neutron [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 825.776450] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd3f90ce-7244-4dc7-8d56-4bf077ecb761 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9d83dda3-5fb1-416d-9307-faeef454efec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.490s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.793928] env[69927]: DEBUG nova.policy [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f63b6df5ed46fd8584bbf36716a0c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66cbe813421e40d1bd515411bc3c045a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 825.802172] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095824, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488296} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.803361] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] a36b06ca-77c8-4d2f-8b43-2c160fbac93f/a36b06ca-77c8-4d2f-8b43-2c160fbac93f.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 825.803628] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 825.803915] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e75599f-2da2-4e8c-9301-212666fc5a05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.812351] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 825.812351] env[69927]: value = "task-4095826" [ 825.812351] env[69927]: _type = "Task" [ 825.812351] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.816227] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.816537] env[69927]: DEBUG nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Instance network_info: |[{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 825.817040] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:b9:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef273cd9-400b-409d-b689-297040f2ca04', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.825068] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Creating folder: Project (1ed984d7170742eca7e89bf3bf45e6ae). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.826245] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29181a3b-6f7a-41bd-977d-821f2ef40d70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.831576] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095826, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.842554] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Created folder: Project (1ed984d7170742eca7e89bf3bf45e6ae) in parent group-v811283. [ 825.842554] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Creating folder: Instances. Parent ref: group-v811411. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.842554] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18fab45c-6e38-47a3-a27f-b21926e2427f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.854450] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Created folder: Instances in parent group-v811411. [ 825.854833] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.855094] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.855382] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d60fee2c-12db-4229-9fd2-fac806fe34b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.880747] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.880747] env[69927]: value = "task-4095829" [ 825.880747] env[69927]: _type = "Task" [ 825.880747] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.891922] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095829, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.005428] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095825, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.007817] env[69927]: DEBUG nova.compute.manager [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received event network-changed-ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 826.008009] env[69927]: DEBUG nova.compute.manager [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Refreshing instance network info cache due to event network-changed-ef273cd9-400b-409d-b689-297040f2ca04. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 826.008268] env[69927]: DEBUG oslo_concurrency.lockutils [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] Acquiring lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.008841] env[69927]: DEBUG oslo_concurrency.lockutils [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] Acquired lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.008841] env[69927]: DEBUG nova.network.neutron [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Refreshing network info cache for port ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.198155] env[69927]: DEBUG nova.network.neutron [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Successfully created port: efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.240872] env[69927]: DEBUG nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 826.246880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0807520d-6809-4990-8e1a-9aad4661f3db tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.253346] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.972s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.253346] env[69927]: DEBUG nova.objects.instance [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 826.331027] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076123} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.331027] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.331027] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbba685a-dd4c-448d-a7fb-47737fb28059 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.357218] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] a36b06ca-77c8-4d2f-8b43-2c160fbac93f/a36b06ca-77c8-4d2f-8b43-2c160fbac93f.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.357972] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57daf147-4c2f-4e07-8c31-fe804f745368 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.379992] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 826.379992] env[69927]: value = "task-4095830" [ 826.379992] env[69927]: _type = "Task" [ 826.379992] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.392733] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095830, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.395688] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095829, 'name': CreateVM_Task, 'duration_secs': 0.483743} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.395688] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.396467] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.396644] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.397745] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 826.397745] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44367f39-9001-4f99-a543-00bf26c3784a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.403100] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 826.403100] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e56bd1-808b-6e1e-75ba-b7e6fa0f1d1f" [ 826.403100] env[69927]: _type = "Task" [ 826.403100] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.414842] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e56bd1-808b-6e1e-75ba-b7e6fa0f1d1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.505444] env[69927]: DEBUG oslo_vmware.api [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4095825, 'name': PowerOnVM_Task, 'duration_secs': 0.608301} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.505444] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.505444] env[69927]: INFO nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Took 8.39 seconds to spawn the instance on the hypervisor. [ 826.505444] env[69927]: DEBUG nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 826.505893] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8106ceb-3111-4c64-91bc-9fe5f23061b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.892021] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095830, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.913340] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e56bd1-808b-6e1e-75ba-b7e6fa0f1d1f, 'name': SearchDatastore_Task, 'duration_secs': 0.041484} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.913660] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.914430] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.914430] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.914430] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.914706] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.914778] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a90b0add-cfd1-4bf6-af2b-f43dbc42ca38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.925661] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.925761] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.926512] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f5ecce2-e399-4b7f-8c6f-f5b638e27100 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.946888] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 826.946888] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52079e5d-5c12-0afb-329d-f70917ce3cd0" [ 826.946888] env[69927]: _type = "Task" [ 826.946888] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.956916] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52079e5d-5c12-0afb-329d-f70917ce3cd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.032752] env[69927]: INFO nova.compute.manager [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Took 58.87 seconds to build instance. [ 827.249183] env[69927]: DEBUG nova.network.neutron [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updated VIF entry in instance network info cache for port ef273cd9-400b-409d-b689-297040f2ca04. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.249597] env[69927]: DEBUG nova.network.neutron [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.257084] env[69927]: DEBUG nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 827.262704] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b446b6f4-8f8a-44c0-a1e4-f663022d60e2 tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.265256] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.159s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.267282] env[69927]: INFO nova.compute.claims [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.304084] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 827.304338] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.304529] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 827.304799] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.306200] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
827.306200] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 827.306200] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 827.306200] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 827.306200] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 827.306567] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 827.306567] env[69927]: DEBUG nova.virt.hardware [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 827.308476] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a0a2c5-6cd5-477d-ac4c-d9e1de0747ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.319320] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2079b02c-ddbc-497d-aae6-edada222b6a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.391256] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095830, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.461027] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52079e5d-5c12-0afb-329d-f70917ce3cd0, 'name': SearchDatastore_Task, 'duration_secs': 0.034804} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.462034] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f867c163-4d64-4ca6-8dda-8ddd08841394 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.469174] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 827.469174] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ece705-0df8-844f-4b0e-47081d62cfe0" [ 827.469174] env[69927]: _type = "Task" [ 827.469174] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.478630] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ece705-0df8-844f-4b0e-47081d62cfe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.537848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6070f6e-85ea-41b9-b4c4-ac6ea0c0d353 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.659s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.757757] env[69927]: DEBUG oslo_concurrency.lockutils [req-b69a531d-302c-4c67-a617-47ac8bc25ac7 req-0f1b9b6b-7827-467b-ba34-c1f732c2c11c service nova] Releasing lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.816124] env[69927]: DEBUG nova.network.neutron [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Successfully updated port: efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.892656] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095830, 'name': ReconfigVM_Task, 'duration_secs': 1.126185} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.892656] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Reconfigured VM instance instance-00000027 to attach disk [datastore2] a36b06ca-77c8-4d2f-8b43-2c160fbac93f/a36b06ca-77c8-4d2f-8b43-2c160fbac93f.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.895250] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41b9f5d5-6579-437c-b6c1-2ae5a4c7a07f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.903761] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 827.903761] env[69927]: value = "task-4095831" [ 827.903761] env[69927]: _type = "Task" [ 827.903761] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.913832] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095831, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.992202] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ece705-0df8-844f-4b0e-47081d62cfe0, 'name': SearchDatastore_Task, 'duration_secs': 0.033423} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.992202] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.992202] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] ff227e07-8e36-48d6-a8c7-1e0087fd1faa/ff227e07-8e36-48d6-a8c7-1e0087fd1faa.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.992202] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c27004f-959d-4200-b6b5-4607fb9696b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.002592] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 828.002592] env[69927]: value = "task-4095832" [ 828.002592] env[69927]: _type = "Task" [ 828.002592] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.015115] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095832, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.041055] env[69927]: DEBUG nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 828.318558] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.318820] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.319119] env[69927]: DEBUG nova.network.neutron [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.418166] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095831, 'name': Rename_Task, 'duration_secs': 0.164115} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.418483] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.418780] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aee72df3-cd98-4aca-bf9b-c5d625cd441e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.428235] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 828.428235] env[69927]: value = "task-4095833" [ 828.428235] env[69927]: _type = "Task" [ 828.428235] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.440967] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095833, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.515492] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095832, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.572091] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.706864] env[69927]: DEBUG nova.compute.manager [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Received event network-vif-plugged-efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.707112] env[69927]: DEBUG oslo_concurrency.lockutils [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] Acquiring lock "e0bca101-cf8d-48e1-a331-b0018548593e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.708041] env[69927]: DEBUG oslo_concurrency.lockutils [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] Lock "e0bca101-cf8d-48e1-a331-b0018548593e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.708041] env[69927]: DEBUG oslo_concurrency.lockutils [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] Lock "e0bca101-cf8d-48e1-a331-b0018548593e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.708041] env[69927]: DEBUG nova.compute.manager [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] No waiting events found dispatching network-vif-plugged-efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 828.708041] env[69927]: WARNING nova.compute.manager [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Received unexpected event network-vif-plugged-efc77bd5-a980-4a4e-9211-70184239a8ee for instance with vm_state building and task_state spawning. [ 828.708041] env[69927]: DEBUG nova.compute.manager [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Received event network-changed-efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.712971] env[69927]: DEBUG nova.compute.manager [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Refreshing instance network info cache due to event network-changed-efc77bd5-a980-4a4e-9211-70184239a8ee. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 828.712971] env[69927]: DEBUG oslo_concurrency.lockutils [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] Acquiring lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.869055] env[69927]: DEBUG nova.network.neutron [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.941769] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604e9c70-6588-4152-a987-f6c074ec78cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.951721] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095833, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.956452] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617f78b2-1871-47f9-86ae-4e42f2e9eba7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.997434] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5888bf12-5952-4e35-8f54-edaadbe10baa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.009841] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d464aebf-88ec-4045-9bd6-c6f18f2a8ea1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.022022] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095832, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600559} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.030960] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] ff227e07-8e36-48d6-a8c7-1e0087fd1faa/ff227e07-8e36-48d6-a8c7-1e0087fd1faa.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 829.031328] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 829.031965] env[69927]: DEBUG nova.compute.provider_tree [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.034018] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfce651e-664a-42da-ac50-6fb01261a84b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.043815] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 829.043815] env[69927]: value = "task-4095834" [ 829.043815] env[69927]: _type = "Task" [ 829.043815] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.061425] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095834, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.121117] env[69927]: DEBUG nova.network.neutron [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updating instance_info_cache with network_info: [{"id": "efc77bd5-a980-4a4e-9211-70184239a8ee", "address": "fa:16:3e:a4:62:f3", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc77bd5-a9", "ovs_interfaceid": "efc77bd5-a980-4a4e-9211-70184239a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.445792] env[69927]: DEBUG oslo_vmware.api [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095833, 'name': PowerOnVM_Task, 'duration_secs': 0.680895} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.446095] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.446300] env[69927]: INFO nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Took 8.66 seconds to spawn the instance on the hypervisor. 
[ 829.446482] env[69927]: DEBUG nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 829.447320] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832a6246-3d56-46ca-9303-0c894a48970b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.557356] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095834, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0862} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.558403] env[69927]: ERROR nova.scheduler.client.report [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [req-dd3545ad-2cca-44dc-afb5-70256d809030] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd3545ad-2cca-44dc-afb5-70256d809030"}]} [ 829.558791] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.562613] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8692a08-5c37-4b0c-bd28-342863482b00 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.589096] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] ff227e07-8e36-48d6-a8c7-1e0087fd1faa/ff227e07-8e36-48d6-a8c7-1e0087fd1faa.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.590157] env[69927]: DEBUG nova.scheduler.client.report [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:822}} [ 829.596043] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-088c1e4c-1cf8-4dc8-a01f-8c850ff7fa05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.620904] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 829.620904] env[69927]: value = "task-4095835" [ 829.620904] env[69927]: _type = "Task" [ 829.620904] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.625501] env[69927]: DEBUG nova.scheduler.client.report [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 829.625966] env[69927]: DEBUG nova.compute.provider_tree [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.627939] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Releasing lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.628346] env[69927]: DEBUG nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Instance network_info: |[{"id": "efc77bd5-a980-4a4e-9211-70184239a8ee", "address": "fa:16:3e:a4:62:f3", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc77bd5-a9", "ovs_interfaceid": "efc77bd5-a980-4a4e-9211-70184239a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.628845] env[69927]: DEBUG oslo_concurrency.lockutils [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] Acquired lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.629443] env[69927]: DEBUG nova.network.neutron [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Refreshing network info cache for port efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.630231] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:62:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ffb921-796a-40fe-9662-d3fc01547dcb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efc77bd5-a980-4a4e-9211-70184239a8ee', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.639486] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Creating folder: Project (66cbe813421e40d1bd515411bc3c045a). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.639486] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f62549a7-27a1-412a-af53-44248531b29f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.645191] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095835, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.646487] env[69927]: DEBUG nova.scheduler.client.report [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 829.659931] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Created folder: Project (66cbe813421e40d1bd515411bc3c045a) in parent group-v811283. [ 829.660187] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Creating folder: Instances. Parent ref: group-v811414. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.660806] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1edc5efe-ba95-4b33-90ca-2c6970ff6224 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.668627] env[69927]: DEBUG nova.scheduler.client.report [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 829.672560] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Created folder: Instances in parent group-v811414. [ 829.672899] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.673114] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.673339] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3de675c5-1a42-4efb-a5bf-0d343c17b7c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.695663] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.695663] env[69927]: value = "task-4095838" [ 829.695663] env[69927]: _type = "Task" [ 829.695663] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.703744] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095838, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.964675] env[69927]: INFO nova.compute.manager [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Took 56.31 seconds to build instance. [ 830.050371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.050634] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.132389] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095835, 'name': ReconfigVM_Task, 'duration_secs': 0.45406} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.132718] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Reconfigured VM instance instance-00000028 to attach disk [datastore2] ff227e07-8e36-48d6-a8c7-1e0087fd1faa/ff227e07-8e36-48d6-a8c7-1e0087fd1faa.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.133390] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb0bf5a2-d4c4-4778-92ae-e408f6f17f5a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.146023] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 830.146023] env[69927]: value = "task-4095839" [ 830.146023] env[69927]: _type = "Task" [ 830.146023] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.156907] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095839, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.212751] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095838, 'name': CreateVM_Task, 'duration_secs': 0.422901} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.213133] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.214342] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.214670] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.215371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 830.220091] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b5bd87-5458-488b-bece-3f44ba43128e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.227653] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 830.227653] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5275ef45-8f4a-dfb3-164d-dcba0fa1ee10" [ 830.227653] env[69927]: _type = "Task" [ 830.227653] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.241331] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5275ef45-8f4a-dfb3-164d-dcba0fa1ee10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.360826] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76e4dcb-d10d-4731-9309-6cee630122fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.369011] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa70fcd-cffa-4c10-bcbc-c0d9f94d2e05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.403824] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6504dff7-ef50-4fa7-bd89-f2c40071e945 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.412233] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faf50b0-d7e9-42a5-9007-347ae75556b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.427181] env[69927]: DEBUG nova.compute.provider_tree [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.439720] env[69927]: DEBUG nova.network.neutron [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updated VIF entry in instance network info cache for port efc77bd5-a980-4a4e-9211-70184239a8ee. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.440088] env[69927]: DEBUG nova.network.neutron [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updating instance_info_cache with network_info: [{"id": "efc77bd5-a980-4a4e-9211-70184239a8ee", "address": "fa:16:3e:a4:62:f3", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc77bd5-a9", "ovs_interfaceid": "efc77bd5-a980-4a4e-9211-70184239a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.469227] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b402e84-30d0-416a-8258-1ab805df91d8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.764s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.659358] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095839, 'name': Rename_Task, 'duration_secs': 0.171019} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.659358] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.659358] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eeabbd5b-3283-4fd1-8a04-b241fd453193 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.666543] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 830.666543] env[69927]: value = "task-4095840" [ 830.666543] env[69927]: _type = "Task" [ 830.666543] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.678267] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095840, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.740653] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5275ef45-8f4a-dfb3-164d-dcba0fa1ee10, 'name': SearchDatastore_Task, 'duration_secs': 0.01292} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.742953] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.742953] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.742953] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.742953] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.743089] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.743089] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edafc12a-68a6-45de-99f1-5121e5c24259 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.756177] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.756177] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.756177] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29dee4f4-e34e-4386-8a8c-945e8a7a22f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.769162] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 830.769162] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244be7f-570f-1201-98ac-047eaab22825" [ 830.769162] env[69927]: _type = "Task" [ 830.769162] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.782304] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244be7f-570f-1201-98ac-047eaab22825, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.942537] env[69927]: DEBUG oslo_concurrency.lockutils [req-7083e9a4-cbcd-4592-8735-acfc57cad58a req-adc80963-2d0d-46b6-9b69-e5efd3839084 service nova] Releasing lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.970026] env[69927]: DEBUG nova.scheduler.client.report [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 830.971055] env[69927]: DEBUG nova.compute.provider_tree [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 67 to 68 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 830.971335] env[69927]: DEBUG nova.compute.provider_tree [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.975796] env[69927]: DEBUG nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 831.181912] env[69927]: DEBUG oslo_vmware.api [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4095840, 'name': PowerOnVM_Task, 'duration_secs': 0.499855} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.181912] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.181912] env[69927]: INFO nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Took 7.71 seconds to spawn the instance on the hypervisor. [ 831.181912] env[69927]: DEBUG nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 831.182897] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7bb849-b2db-4b81-b9db-fce4b42b6af3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.279969] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244be7f-570f-1201-98ac-047eaab22825, 'name': SearchDatastore_Task, 'duration_secs': 0.016414} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.282492] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-883eaa69-99fe-429e-ac44-abc50a5ee64e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.294026] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 831.294026] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520a750a-a496-20c5-f3db-d93d014d914a" [ 831.294026] env[69927]: _type = "Task" [ 831.294026] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.305830] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520a750a-a496-20c5-f3db-d93d014d914a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.413893] env[69927]: DEBUG nova.compute.manager [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.414168] env[69927]: DEBUG nova.compute.manager [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing instance network info cache due to event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 831.414821] env[69927]: DEBUG oslo_concurrency.lockutils [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] Acquiring lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.414821] env[69927]: DEBUG oslo_concurrency.lockutils [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] Acquired lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.414821] env[69927]: DEBUG nova.network.neutron [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.482304] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.217s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.482395] env[69927]: DEBUG nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.487895] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.289s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.488248] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.490585] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.725s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.490778] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.492932] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.662s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.494511] env[69927]: DEBUG nova.objects.instance [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 831.536712] env[69927]: INFO nova.scheduler.client.report [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Deleted allocations for instance c2b6b943-f6d6-427f-aba5-1d619d889325 [ 831.538537] env[69927]: INFO nova.scheduler.client.report [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleted allocations for instance bf4bee47-36ce-43ee-96f1-96f262882986 [ 831.555380] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.708788] env[69927]: INFO nova.compute.manager [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Took 57.97 seconds to build instance. [ 831.807195] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520a750a-a496-20c5-f3db-d93d014d914a, 'name': SearchDatastore_Task, 'duration_secs': 0.02934} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.807526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.807794] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e0bca101-cf8d-48e1-a331-b0018548593e/e0bca101-cf8d-48e1-a331-b0018548593e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.808372] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21d7eb9e-f723-4d10-b3f3-32a8214ca22f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.815822] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 831.815822] env[69927]: value = "task-4095841" [ 831.815822] env[69927]: _type = "Task" [ 831.815822] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.824883] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095841, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.990304] env[69927]: DEBUG nova.compute.utils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 831.991421] env[69927]: DEBUG nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 831.993087] env[69927]: DEBUG nova.network.neutron [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.060936] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db1f6b66-b7a0-48d0-8928-8723211a914f tempest-DeleteServersAdminTestJSON-1107524716 tempest-DeleteServersAdminTestJSON-1107524716-project-member] Lock "c2b6b943-f6d6-427f-aba5-1d619d889325" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.738s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.062743] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3490d8b3-08bb-43c4-ab22-d0298ab970b6 tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "bf4bee47-36ce-43ee-96f1-96f262882986" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.154s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.096275] env[69927]: DEBUG nova.policy [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '181ec10b2e4b4f1794294d18313a5918', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a47794e5824701925ad4bdc3651196', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 832.212439] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2e4953a7-7cdc-4724-9f99-4c0618d11019 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.997s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.338939] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095841, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.404493] env[69927]: DEBUG nova.network.neutron [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updated VIF entry in instance network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.404493] env[69927]: DEBUG nova.network.neutron [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [{"id": "5615148b-36c9-40b6-9282-76bdcfb9931e", "address": "fa:16:3e:91:a6:13", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615148b-36", "ovs_interfaceid": "5615148b-36c9-40b6-9282-76bdcfb9931e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.498371] env[69927]: DEBUG nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 832.504999] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0cefc4eb-fe51-48c5-bb35-fc4d4b5d20ac tempest-ServersAdmin275Test-578997575 tempest-ServersAdmin275Test-578997575-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.506439] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.388s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.508225] env[69927]: INFO nova.compute.claims [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.716484] env[69927]: DEBUG nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 832.806597] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "a4249857-6f60-4040-b676-d2d19dc83f15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.806894] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "a4249857-6f60-4040-b676-d2d19dc83f15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.807138] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "a4249857-6f60-4040-b676-d2d19dc83f15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.807325] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "a4249857-6f60-4040-b676-d2d19dc83f15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.807496] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "a4249857-6f60-4040-b676-d2d19dc83f15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.813904] env[69927]: INFO nova.compute.manager [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Terminating instance [ 832.829833] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095841, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600155} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.830151] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e0bca101-cf8d-48e1-a331-b0018548593e/e0bca101-cf8d-48e1-a331-b0018548593e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.830452] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.830929] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3d4f202-aead-474e-bb5a-5551aa9d6da6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.839972] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 832.839972] env[69927]: value = "task-4095842" [ 832.839972] env[69927]: _type = "Task" [ 832.839972] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.851509] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095842, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.909520] env[69927]: DEBUG oslo_concurrency.lockutils [req-6af9478c-74d4-4218-94f1-02ba7d22ae1c req-7be11a4a-8039-40bd-b827-5349599e1230 service nova] Releasing lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.032531] env[69927]: DEBUG nova.network.neutron [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Successfully created port: b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.254551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.321125] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "refresh_cache-a4249857-6f60-4040-b676-d2d19dc83f15" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.321307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquired lock "refresh_cache-a4249857-6f60-4040-b676-d2d19dc83f15" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.321487] env[69927]: DEBUG nova.network.neutron [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.357244] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.341052} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.357502] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.358447] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b28c16-a337-4062-87cc-271adb0362a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.389668] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] e0bca101-cf8d-48e1-a331-b0018548593e/e0bca101-cf8d-48e1-a331-b0018548593e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.390494] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b6c0a71-67a3-4903-99c7-ce9c511479ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.412903] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 833.412903] env[69927]: value = "task-4095843" [ 833.412903] env[69927]: _type = "Task" [ 833.412903] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.422991] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095843, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.510802] env[69927]: DEBUG nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 833.540859] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 833.541114] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.541322] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 833.541520] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.541656] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 833.541803] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 833.542611] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 833.542611] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 833.542611] env[69927]: DEBUG 
nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 833.542611] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 833.542860] env[69927]: DEBUG nova.virt.hardware [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 833.543689] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397a2aab-59e2-43ff-9b36-0b37ba88251f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.556185] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb5bc82-c69d-46c9-8241-f440f210efde {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.929640] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095843, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.940577] env[69927]: DEBUG nova.compute.manager [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received event network-changed-ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 833.941559] env[69927]: DEBUG nova.compute.manager [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Refreshing instance network info cache due to event network-changed-ef273cd9-400b-409d-b689-297040f2ca04. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 833.941559] env[69927]: DEBUG oslo_concurrency.lockutils [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] Acquiring lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.941559] env[69927]: DEBUG oslo_concurrency.lockutils [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] Acquired lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.941693] env[69927]: DEBUG nova.network.neutron [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Refreshing network info cache for port ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.040079] env[69927]: DEBUG nova.network.neutron [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.110821] env[69927]: DEBUG nova.network.neutron [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.136976] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb04b35-27ef-4cd7-ae45-7ea3cddf2769 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.147061] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60a42fd-ded1-465d-9fb3-a6e7a93eb7bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.186391] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b95308-98ed-4652-be46-bf3522341f25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.196949] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8229f64-8dac-48c0-bd48-be61dca09279 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.212910] env[69927]: DEBUG nova.compute.provider_tree [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.382786] env[69927]: DEBUG nova.compute.manager [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.383031] env[69927]: DEBUG nova.compute.manager [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing instance network info cache due to event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 834.383259] env[69927]: DEBUG oslo_concurrency.lockutils [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] Acquiring lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.383404] env[69927]: DEBUG oslo_concurrency.lockutils [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] Acquired lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.383564] env[69927]: DEBUG nova.network.neutron [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.424404] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095843, 'name': ReconfigVM_Task, 'duration_secs': 0.569747} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.424619] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Reconfigured VM instance instance-00000029 to attach disk [datastore1] e0bca101-cf8d-48e1-a331-b0018548593e/e0bca101-cf8d-48e1-a331-b0018548593e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.426209] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2fcf890f-723c-40d8-96ab-7ec8d2fdf824 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.432855] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 834.432855] env[69927]: value = "task-4095844" [ 834.432855] env[69927]: _type = "Task" [ 834.432855] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.443646] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095844, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.611699] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Releasing lock "refresh_cache-a4249857-6f60-4040-b676-d2d19dc83f15" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.612198] env[69927]: DEBUG nova.compute.manager [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.612531] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.613684] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d052537-104d-4de5-a7ab-b9444513ee0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.629562] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.629711] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b64b8d8e-6f28-475a-a876-a334b77aea62 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.636604] env[69927]: DEBUG oslo_vmware.api [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 834.636604] env[69927]: value = "task-4095845" [ 834.636604] env[69927]: _type = "Task" [ 834.636604] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.647012] env[69927]: DEBUG oslo_vmware.api [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.704428] env[69927]: DEBUG nova.network.neutron [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updated VIF entry in instance network info cache for port ef273cd9-400b-409d-b689-297040f2ca04. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.704741] env[69927]: DEBUG nova.network.neutron [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.757400] env[69927]: DEBUG nova.scheduler.client.report [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 834.757663] env[69927]: DEBUG nova.compute.provider_tree [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 68 to 69 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 834.757851] env[69927]: DEBUG nova.compute.provider_tree [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.944780] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095844, 'name': Rename_Task, 'duration_secs': 0.212953} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.945594] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.946078] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23b2a615-8da5-4aff-b600-36f55da2c9e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.957781] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 834.957781] env[69927]: value = "task-4095846" [ 834.957781] env[69927]: _type = "Task" [ 834.957781] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.974168] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095846, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.129708] env[69927]: DEBUG nova.network.neutron [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updated VIF entry in instance network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.130120] env[69927]: DEBUG nova.network.neutron [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [{"id": "5615148b-36c9-40b6-9282-76bdcfb9931e", "address": "fa:16:3e:91:a6:13", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615148b-36", "ovs_interfaceid": "5615148b-36c9-40b6-9282-76bdcfb9931e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.149877] env[69927]: DEBUG oslo_vmware.api [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095845, 'name': PowerOffVM_Task, 'duration_secs': 0.430686} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.150205] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.150512] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.150563] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da1b386f-b107-4b13-bed0-a9d6cce3be39 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.152863] env[69927]: DEBUG nova.network.neutron [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Successfully updated port: b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.183043] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.183043] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.183043] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleting the datastore file [datastore1] a4249857-6f60-4040-b676-d2d19dc83f15 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.183043] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e742d080-3dfc-47f7-9513-dc21c48c022a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.191694] env[69927]: DEBUG oslo_vmware.api [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for the task: (returnval){ [ 835.191694] env[69927]: value = "task-4095848" [ 835.191694] env[69927]: _type = "Task" [ 835.191694] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.201022] env[69927]: DEBUG oslo_vmware.api [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.210466] env[69927]: DEBUG oslo_concurrency.lockutils [req-d79585f6-e266-416f-89c2-6ef9f4e011ae req-826f143a-20a1-43bc-8c1c-2491dc6659e1 service nova] Releasing lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.266904] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.760s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.267459] env[69927]: DEBUG nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 835.270209] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.098s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.271628] env[69927]: INFO nova.compute.claims [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.470288] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095846, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.633891] env[69927]: DEBUG oslo_concurrency.lockutils [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] Releasing lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.634647] env[69927]: DEBUG nova.compute.manager [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Received event network-changed-9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 835.634647] env[69927]: DEBUG nova.compute.manager [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Refreshing instance network info cache due to event network-changed-9db3f0b7-d302-466d-93ae-16dcd9a9f682. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 835.634906] env[69927]: DEBUG oslo_concurrency.lockutils [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] Acquiring lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.635079] env[69927]: DEBUG oslo_concurrency.lockutils [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] Acquired lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.635252] env[69927]: DEBUG nova.network.neutron [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Refreshing network info cache for port 9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.656258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.656421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.656576] env[69927]: DEBUG nova.network.neutron [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.703859] env[69927]: DEBUG oslo_vmware.api [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Task: {'id': task-4095848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304858} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.704407] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.704801] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.705961] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.706178] env[69927]: INFO nova.compute.manager [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Took 1.09 seconds to destroy the instance on the hypervisor. [ 835.706502] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 835.706793] env[69927]: DEBUG nova.compute.manager [-] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 835.706937] env[69927]: DEBUG nova.network.neutron [-] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.748489] env[69927]: DEBUG nova.network.neutron [-] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.777673] env[69927]: DEBUG nova.compute.utils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 835.782012] env[69927]: DEBUG nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 835.782246] env[69927]: DEBUG nova.network.neutron [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.894069] env[69927]: DEBUG nova.policy [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20629f26389d40199a4c5d5d2312dbae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2fb1fc4c3ae41a5b331c6be7973eb72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 835.973034] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095846, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.221170] env[69927]: DEBUG nova.network.neutron [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.251831] env[69927]: DEBUG nova.network.neutron [-] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.283705] env[69927]: DEBUG nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 836.472895] env[69927]: DEBUG oslo_vmware.api [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4095846, 'name': PowerOnVM_Task, 'duration_secs': 1.253902} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.473414] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.473628] env[69927]: INFO nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Took 9.22 seconds to spawn the instance on the hypervisor. 
[ 836.473804] env[69927]: DEBUG nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 836.475020] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b56d70-80f1-4e0e-83c5-110ad914ab56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.494883] env[69927]: DEBUG nova.network.neutron [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updating instance_info_cache with network_info: [{"id": "b347a042-35ea-41f5-a96e-84e4553f55d2", "address": "fa:16:3e:10:1a:38", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb347a042-35", "ovs_interfaceid": "b347a042-35ea-41f5-a96e-84e4553f55d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.511856] env[69927]: DEBUG nova.network.neutron [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updated VIF entry in instance network info cache for port 9db3f0b7-d302-466d-93ae-16dcd9a9f682. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 836.512213] env[69927]: DEBUG nova.network.neutron [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [{"id": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "address": "fa:16:3e:f9:89:59", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db3f0b7-d3", "ovs_interfaceid": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.588031] env[69927]: DEBUG nova.network.neutron [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Successfully created port: d0780c40-8a68-4d93-938c-96312b4436ec {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.762301] env[69927]: INFO nova.compute.manager [-] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Took 1.06 seconds to deallocate network for instance. 
[ 836.763957] env[69927]: DEBUG nova.compute.manager [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Received event network-vif-plugged-b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 836.765024] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Acquiring lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.765024] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.765024] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.765024] env[69927]: DEBUG nova.compute.manager [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] No waiting events found dispatching network-vif-plugged-b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 836.765024] env[69927]: WARNING nova.compute.manager [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Received unexpected event network-vif-plugged-b347a042-35ea-41f5-a96e-84e4553f55d2 for instance with vm_state building and task_state spawning. [ 836.765375] env[69927]: DEBUG nova.compute.manager [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Received event network-changed-b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 836.765375] env[69927]: DEBUG nova.compute.manager [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Refreshing instance network info cache due to event network-changed-b347a042-35ea-41f5-a96e-84e4553f55d2. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 836.765506] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Acquiring lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.879030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc768423-5992-4892-8d29-72f058654a3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.888777] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e2915b-0898-40ef-831c-01ad747fe094 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.921270] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a744d44b-f7b5-4278-9a24-fe80644c06e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.930487] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c50e72f-6af0-4933-98d5-3aedd3e3cb22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.945567] env[69927]: DEBUG nova.compute.provider_tree [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.998776] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.999131] env[69927]: DEBUG nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Instance network_info: |[{"id": "b347a042-35ea-41f5-a96e-84e4553f55d2", "address": "fa:16:3e:10:1a:38", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb347a042-35", "ovs_interfaceid": 
"b347a042-35ea-41f5-a96e-84e4553f55d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 836.999528] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Acquired lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.999528] env[69927]: DEBUG nova.network.neutron [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Refreshing network info cache for port b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.000715] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:1a:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7b7edd0-124a-48ec-ae26-1aa14f9b884a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b347a042-35ea-41f5-a96e-84e4553f55d2', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.010032] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Creating folder: Project (71a47794e5824701925ad4bdc3651196). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.013635] env[69927]: INFO nova.compute.manager [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Took 58.08 seconds to build instance. [ 837.013635] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57d5a6dd-b9a1-4166-8749-0bfe8af4701e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.017882] env[69927]: DEBUG oslo_concurrency.lockutils [req-613c1e8f-4006-4252-ab54-18aea260c66d req-4438df9f-b8bc-4b24-a105-2ceea579c795 service nova] Releasing lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.027352] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Created folder: Project (71a47794e5824701925ad4bdc3651196) in parent group-v811283. [ 837.027514] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Creating folder: Instances. Parent ref: group-v811417. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.027792] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce4814fc-77e6-4a13-8fcc-6fc4e6dba625 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.046909] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Created folder: Instances in parent group-v811417. [ 837.047147] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.047355] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.047575] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7708fd5-0117-4c98-a88e-ed9f3ff3a6ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.068586] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.068586] env[69927]: value = "task-4095851" [ 837.068586] env[69927]: _type = "Task" [ 837.068586] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.079409] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095851, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.277225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.301032] env[69927]: DEBUG nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 837.331511] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 837.331790] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.331972] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 837.332184] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.332332] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.332479] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 837.332689] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 837.333053] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 837.333282] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Got 1 possible 
topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 837.333505] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 837.333712] env[69927]: DEBUG nova.virt.hardware [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 837.334622] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46ce353-5493-41dc-b055-d9f9b2410f52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.344120] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad47e1b-1c59-473d-96a0-6ebe4ed89c38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.371908] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.372157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.372380] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.372637] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.372969] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.376656] env[69927]: INFO nova.compute.manager [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Terminating instance [ 837.448621] env[69927]: DEBUG nova.scheduler.client.report [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.515306] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0209d100-8336-4340-8848-82e1fb52c45f tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e0bca101-cf8d-48e1-a331-b0018548593e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.465s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.579577] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095851, 'name': CreateVM_Task, 'duration_secs': 0.357285} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.579779] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 837.580395] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.580558] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.580881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 837.581223] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54d340c4-bebb-42b8-8112-5b8e77456e59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.589672] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 837.589672] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf54d5-5626-83e1-f24f-5e6e9ce0b2f1" [ 837.589672] env[69927]: _type = "Task" [ 837.589672] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.599542] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf54d5-5626-83e1-f24f-5e6e9ce0b2f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.718331] env[69927]: DEBUG nova.network.neutron [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updated VIF entry in instance network info cache for port b347a042-35ea-41f5-a96e-84e4553f55d2. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.718487] env[69927]: DEBUG nova.network.neutron [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updating instance_info_cache with network_info: [{"id": "b347a042-35ea-41f5-a96e-84e4553f55d2", "address": "fa:16:3e:10:1a:38", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb347a042-35", "ovs_interfaceid": "b347a042-35ea-41f5-a96e-84e4553f55d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.881956] env[69927]: DEBUG nova.compute.manager [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 837.882217] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 837.883119] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f080ec46-bc16-4ace-9cf2-7bab93216dc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.892271] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 837.892552] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60cd73ec-2242-4a10-8df4-e33cac8a4713 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.900196] env[69927]: DEBUG oslo_vmware.api [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 837.900196] env[69927]: value = "task-4095852" [ 837.900196] env[69927]: _type = "Task" [ 837.900196] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.909719] env[69927]: DEBUG oslo_vmware.api [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.958060] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.958619] env[69927]: DEBUG nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 837.964449] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.602s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.964449] env[69927]: DEBUG nova.objects.instance [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lazy-loading 'resources' on Instance uuid c6a06550-33ed-4fee-bd37-3fce9c55b235 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.017827] env[69927]: DEBUG nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.101231] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf54d5-5626-83e1-f24f-5e6e9ce0b2f1, 'name': SearchDatastore_Task, 'duration_secs': 0.015152} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.101628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.101909] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.102179] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.102325] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.102546] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 
tempest-AttachVolumeNegativeTest-700673554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.102867] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3370c545-67c1-4fb3-8903-a30b2f9987ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.120911] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.121171] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.121975] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d443c863-0e17-4d14-9cd3-370d5181b84f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.130652] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 838.130652] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520784b9-ca24-01e2-303b-59ffc6004150" [ 838.130652] env[69927]: _type = "Task" [ 838.130652] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.140221] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520784b9-ca24-01e2-303b-59ffc6004150, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.221263] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Releasing lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.221541] env[69927]: DEBUG nova.compute.manager [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Received event network-changed-9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 838.221834] env[69927]: DEBUG nova.compute.manager [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Refreshing instance network info cache due to event network-changed-9db3f0b7-d302-466d-93ae-16dcd9a9f682. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 838.221927] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Acquiring lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.222124] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Acquired lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.222299] env[69927]: DEBUG nova.network.neutron [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Refreshing network info cache for port 9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.413950] env[69927]: DEBUG oslo_vmware.api [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095852, 'name': PowerOffVM_Task, 'duration_secs': 0.224269} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.414453] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 838.414726] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 838.415134] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b26c6dd-ced1-48f5-9f69-6c8abfccbb96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.466239] env[69927]: DEBUG nova.compute.utils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 838.471614] env[69927]: DEBUG nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 838.472121] env[69927]: DEBUG nova.network.neutron [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.493467] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 838.494051] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 838.494051] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Deleting the datastore file [datastore2] a36b06ca-77c8-4d2f-8b43-2c160fbac93f {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 838.495139] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30f5ac90-2add-491d-9ffc-22c97dc4c2f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.509128] env[69927]: DEBUG oslo_vmware.api [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 838.509128] env[69927]: value = "task-4095854" [ 838.509128] env[69927]: _type = "Task" [ 838.509128] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.520039] env[69927]: DEBUG oslo_vmware.api [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095854, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.525929] env[69927]: DEBUG nova.policy [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c8457c467ff48728524392de5b3fabf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53d4d694901e4d8188e06ccec88b4a1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 838.547740] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.649951] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520784b9-ca24-01e2-303b-59ffc6004150, 'name': SearchDatastore_Task, 'duration_secs': 0.039314} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.654135] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bf4e526-5b25-498d-8991-727eb8d203c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.659753] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 838.659753] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52237354-07dc-5c82-866a-9d02fe49b47f" [ 838.659753] env[69927]: _type = "Task" [ 838.659753] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.672243] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52237354-07dc-5c82-866a-9d02fe49b47f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.672243] env[69927]: DEBUG nova.network.neutron [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Successfully updated port: d0780c40-8a68-4d93-938c-96312b4436ec {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.949434] env[69927]: DEBUG nova.network.neutron [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Successfully created port: 3da71ace-83d3-4b37-b02c-724f53d7f8bf {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.972767] env[69927]: DEBUG nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 839.024115] env[69927]: DEBUG oslo_vmware.api [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.298219} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.024548] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.024869] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.025100] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.026210] env[69927]: INFO nova.compute.manager [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 839.026210] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.029211] env[69927]: DEBUG nova.compute.manager [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.029410] env[69927]: DEBUG nova.network.neutron [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.094026] env[69927]: DEBUG nova.network.neutron [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updated VIF entry in instance network info cache for port 9db3f0b7-d302-466d-93ae-16dcd9a9f682. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.094026] env[69927]: DEBUG nova.network.neutron [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [{"id": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "address": "fa:16:3e:f9:89:59", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db3f0b7-d3", "ovs_interfaceid": "9db3f0b7-d302-466d-93ae-16dcd9a9f682", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.111745] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c406560c-4b95-4f38-b325-e249800a35e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.122836] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4755ca-e8b8-47ef-95c9-0cb8c696d072 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.160901] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600aa898-2dc9-4397-8fc6-b10dcef9d092 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.176803] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': 
session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52237354-07dc-5c82-866a-9d02fe49b47f, 'name': SearchDatastore_Task, 'duration_secs': 0.021341} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.177382] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "refresh_cache-80fc9add-683b-424e-9876-cdcae664e2da" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.177507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "refresh_cache-80fc9add-683b-424e-9876-cdcae664e2da" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.177650] env[69927]: DEBUG nova.network.neutron [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 839.182198] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.182470] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e1b3d0bc-a251-4dbd-89a6-216a2f2c1313/e1b3d0bc-a251-4dbd-89a6-216a2f2c1313.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.183941] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f49513-6bde-493e-8481-127e0eadbbd2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.189848] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e286f8d-5dbd-4dd7-b025-f518ba9c0cda {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.204197] env[69927]: DEBUG nova.compute.provider_tree [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.210904] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 839.210904] env[69927]: value = "task-4095855" [ 839.210904] env[69927]: _type = 
"Task" [ 839.210904] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.221662] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095855, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.596433] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ae92a91-81ee-4382-85fc-30f63e4d5975 req-d7882b10-5e02-460e-a3db-a47c0d5fd3c9 service nova] Releasing lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.712887] env[69927]: DEBUG nova.scheduler.client.report [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 839.728068] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095855, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.730194] env[69927]: DEBUG nova.network.neutron [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.790331] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.790543] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing instance network info cache due to event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 839.790784] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Acquiring lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.790927] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Acquired lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.791133] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.896220] env[69927]: DEBUG nova.compute.manager [req-f3b5191b-2f38-470a-9ef4-e0e299e25192 req-8c4eb4be-a295-41d7-af9b-49f5e39ba8e0 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Received event network-vif-deleted-9db3f0b7-d302-466d-93ae-16dcd9a9f682 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.896506] env[69927]: INFO nova.compute.manager [req-f3b5191b-2f38-470a-9ef4-e0e299e25192 req-8c4eb4be-a295-41d7-af9b-49f5e39ba8e0 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Neutron deleted interface 9db3f0b7-d302-466d-93ae-16dcd9a9f682; detaching it from the instance and deleting it from the info cache [ 839.896789] env[69927]: DEBUG nova.network.neutron [req-f3b5191b-2f38-470a-9ef4-e0e299e25192 req-8c4eb4be-a295-41d7-af9b-49f5e39ba8e0 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.903978] env[69927]: DEBUG nova.network.neutron [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Updating instance_info_cache with network_info: [{"id": "d0780c40-8a68-4d93-938c-96312b4436ec", "address": "fa:16:3e:a8:2b:33", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0780c40-8a", "ovs_interfaceid": "d0780c40-8a68-4d93-938c-96312b4436ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.984235] env[69927]: DEBUG nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 839.999248] env[69927]: DEBUG nova.network.neutron [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.025739] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 840.025739] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.025739] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 840.026136] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.026136] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 840.026136] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 840.026136] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 840.026136] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 840.026310] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 840.026310] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 840.026310] env[69927]: DEBUG nova.virt.hardware [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 840.027488] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efa07a2-d087-4836-a8db-910f85284fce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.039592] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daafc61-9cad-47dd-b825-76f21ff01553 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.222857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.260s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.224971] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.027s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.225391] env[69927]: DEBUG nova.objects.instance [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lazy-loading 'resources' on Instance uuid 
cde9885b-1aa8-411d-847e-087fe375002b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 840.233472] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599156} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.234322] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e1b3d0bc-a251-4dbd-89a6-216a2f2c1313/e1b3d0bc-a251-4dbd-89a6-216a2f2c1313.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.234322] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.237026] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c503a58-f1b9-4afc-a73a-01ebbcc3782e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.248952] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 840.248952] env[69927]: value = "task-4095856" [ 840.248952] env[69927]: _type = "Task" [ 840.248952] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.254175] env[69927]: INFO nova.scheduler.client.report [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Deleted allocations for instance c6a06550-33ed-4fee-bd37-3fce9c55b235 [ 840.265741] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095856, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.402657] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5e1cd5b-116b-4d43-b6ce-d6075959b9ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.404943] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "refresh_cache-80fc9add-683b-424e-9876-cdcae664e2da" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.405281] env[69927]: DEBUG nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Instance network_info: |[{"id": "d0780c40-8a68-4d93-938c-96312b4436ec", "address": "fa:16:3e:a8:2b:33", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0780c40-8a", "ovs_interfaceid": "d0780c40-8a68-4d93-938c-96312b4436ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 840.406214] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:2b:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0780c40-8a68-4d93-938c-96312b4436ec', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.414079] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 840.414782] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.415093] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b721ac7-7deb-48e5-a447-67efe511384d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.438428] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d67f46-d4bc-4b4e-baa8-c73d57f0b684 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.451186] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.451186] env[69927]: value = "task-4095857" [ 840.451186] env[69927]: _type = "Task" [ 840.451186] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.464602] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095857, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.479788] env[69927]: DEBUG nova.compute.manager [req-f3b5191b-2f38-470a-9ef4-e0e299e25192 req-8c4eb4be-a295-41d7-af9b-49f5e39ba8e0 service nova] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Detach interface failed, port_id=9db3f0b7-d302-466d-93ae-16dcd9a9f682, reason: Instance a36b06ca-77c8-4d2f-8b43-2c160fbac93f could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 840.501955] env[69927]: INFO nova.compute.manager [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Took 1.47 seconds to deallocate network for instance. [ 840.613720] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updated VIF entry in instance network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.614181] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [{"id": "5615148b-36c9-40b6-9282-76bdcfb9931e", "address": "fa:16:3e:91:a6:13", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615148b-36", "ovs_interfaceid": "5615148b-36c9-40b6-9282-76bdcfb9931e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.758118] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075929} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.759045] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 840.762295] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2566fab0-affc-4ad5-a351-1d515fd2cc14 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.789658] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] e1b3d0bc-a251-4dbd-89a6-216a2f2c1313/e1b3d0bc-a251-4dbd-89a6-216a2f2c1313.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 840.790393] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ca1c4034-1c83-41c7-9def-8b2d8ffaef4b tempest-ServersAdmin275Test-1017318087 tempest-ServersAdmin275Test-1017318087-project-member] Lock "c6a06550-33ed-4fee-bd37-3fce9c55b235" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.718s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.795490] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcca1b5e-00f7-4f27-a57c-55259d97a7b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.819559] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 840.819559] env[69927]: value = "task-4095858" [ 840.819559] env[69927]: _type = "Task" [ 840.819559] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.829906] env[69927]: DEBUG nova.network.neutron [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Successfully updated port: 3da71ace-83d3-4b37-b02c-724f53d7f8bf {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.831868] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095858, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.968409] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095857, 'name': CreateVM_Task, 'duration_secs': 0.438583} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.968409] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.969976] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.969976] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.969976] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 840.969976] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa96fb29-da55-4030-9f16-7920f8beebf9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.978309] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 840.978309] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5284b57f-f241-6a64-1f70-ee2097c5fe38" [ 840.978309] env[69927]: _type = "Task" [ 840.978309] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.988804] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5284b57f-f241-6a64-1f70-ee2097c5fe38, 'name': SearchDatastore_Task, 'duration_secs': 0.010617} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.989150] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.989362] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.989599] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.989745] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.989923] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.990213] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a6fcffe-5dee-4f0f-9e18-b0c7b139496b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.003460] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.003672] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.004350] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcfb12fa-6859-4ec8-80ec-87758336558a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.013906] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.016009] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 841.016009] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52980d95-a249-45f9-d20e-1d7fa06b8be5" [ 841.016009] env[69927]: _type = "Task" [ 841.016009] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.028250] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52980d95-a249-45f9-d20e-1d7fa06b8be5, 'name': SearchDatastore_Task, 'duration_secs': 0.013213} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.029254] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a7174a9-6fb2-4d29-82fc-118ef6ad7731 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.041904] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 841.041904] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5214b677-caa6-d7f2-a4dd-bcf2df477bd1" [ 841.041904] env[69927]: _type = "Task" [ 841.041904] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.052581] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5214b677-caa6-d7f2-a4dd-bcf2df477bd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.117073] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Releasing lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.117400] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Received event network-changed-efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.117589] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Refreshing instance network info cache due to event network-changed-efc77bd5-a980-4a4e-9211-70184239a8ee. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 841.117824] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Acquiring lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.117960] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Acquired lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.118138] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Refreshing network info cache for port efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.330632] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095858, 'name': ReconfigVM_Task, 'duration_secs': 0.376776} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.330632] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Reconfigured VM instance instance-0000002a to attach disk [datastore1] e1b3d0bc-a251-4dbd-89a6-216a2f2c1313/e1b3d0bc-a251-4dbd-89a6-216a2f2c1313.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.335412] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6099570-fe3c-44ba-b61b-ef3ad1e1701b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.337667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "refresh_cache-8de4160d-2282-4ed3-bdf0-349445a6eab8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.338771] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquired lock "refresh_cache-8de4160d-2282-4ed3-bdf0-349445a6eab8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.338771] env[69927]: DEBUG nova.network.neutron [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.349321] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 841.349321] env[69927]: value = "task-4095859" [ 841.349321] env[69927]: _type = "Task" [ 841.349321] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.368427] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095859, 'name': Rename_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.397379] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0427f6-7269-4574-9d46-dfb845e67dcc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.406197] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a7e39c-009a-4bdd-a0a4-958b7d9f1fa2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.441874] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d96800-f352-4f32-b7f3-61baa9063de7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.454166] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3032e81a-58f1-482a-8e7d-62402af5aa50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.470477] env[69927]: DEBUG nova.compute.provider_tree [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.556761] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5214b677-caa6-d7f2-a4dd-bcf2df477bd1, 'name': SearchDatastore_Task, 'duration_secs': 0.01334} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.557099] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.557383] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 80fc9add-683b-424e-9876-cdcae664e2da/80fc9add-683b-424e-9876-cdcae664e2da.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.557706] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b461244-3e5d-4172-afac-78be1dd720ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.564934] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 841.564934] env[69927]: value = "task-4095860" [ 841.564934] env[69927]: _type = "Task" [ 841.564934] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.574615] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.860695] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095859, 'name': Rename_Task, 'duration_secs': 0.171128} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.860983] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.866500] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a6fb17a-9e04-419e-bbf9-9e468a9a2bf0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.869470] env[69927]: DEBUG nova.compute.manager [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Received event network-changed-efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.869654] env[69927]: DEBUG nova.compute.manager [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Refreshing instance network info cache due to event network-changed-efc77bd5-a980-4a4e-9211-70184239a8ee. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 841.869841] env[69927]: DEBUG oslo_concurrency.lockutils [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] Acquiring lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.876623] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 841.876623] env[69927]: value = "task-4095861" [ 841.876623] env[69927]: _type = "Task" [ 841.876623] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.889946] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095861, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.908405] env[69927]: DEBUG nova.network.neutron [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.949911] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.973200] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updated VIF entry in instance network info cache for port efc77bd5-a980-4a4e-9211-70184239a8ee. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.973390] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updating instance_info_cache with network_info: [{"id": "efc77bd5-a980-4a4e-9211-70184239a8ee", "address": "fa:16:3e:a4:62:f3", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc77bd5-a9", "ovs_interfaceid": "efc77bd5-a980-4a4e-9211-70184239a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.008409] env[69927]: DEBUG nova.scheduler.client.report [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 69 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 842.008749] env[69927]: DEBUG nova.compute.provider_tree [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 69 to 70 during operation: 
update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 842.008940] env[69927]: DEBUG nova.compute.provider_tree [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.077879] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095860, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.083972] env[69927]: DEBUG nova.compute.manager [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Received event network-vif-plugged-3da71ace-83d3-4b37-b02c-724f53d7f8bf {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.084585] env[69927]: DEBUG oslo_concurrency.lockutils [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] Acquiring lock "8de4160d-2282-4ed3-bdf0-349445a6eab8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.084585] env[69927]: DEBUG oslo_concurrency.lockutils [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.084585] env[69927]: DEBUG oslo_concurrency.lockutils [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.085372] env[69927]: DEBUG nova.compute.manager [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] No waiting events found dispatching network-vif-plugged-3da71ace-83d3-4b37-b02c-724f53d7f8bf {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 842.085372] env[69927]: WARNING nova.compute.manager [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Received unexpected event network-vif-plugged-3da71ace-83d3-4b37-b02c-724f53d7f8bf for instance with vm_state building and task_state spawning. 
[ 842.085372] env[69927]: DEBUG nova.compute.manager [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Received event network-changed-3da71ace-83d3-4b37-b02c-724f53d7f8bf {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.085372] env[69927]: DEBUG nova.compute.manager [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Refreshing instance network info cache due to event network-changed-3da71ace-83d3-4b37-b02c-724f53d7f8bf. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 842.085548] env[69927]: DEBUG oslo_concurrency.lockutils [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] Acquiring lock "refresh_cache-8de4160d-2282-4ed3-bdf0-349445a6eab8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.110259] env[69927]: DEBUG nova.network.neutron [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Updating instance_info_cache with network_info: [{"id": "3da71ace-83d3-4b37-b02c-724f53d7f8bf", "address": "fa:16:3e:14:81:fa", "network": {"id": "53de9e43-eb8a-489d-9cc9-7838d268534d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1768040524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d4d694901e4d8188e06ccec88b4a1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3da71ace-83", "ovs_interfaceid": "3da71ace-83d3-4b37-b02c-724f53d7f8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.392111] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095861, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.479377] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Releasing lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.479662] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Received event network-vif-plugged-d0780c40-8a68-4d93-938c-96312b4436ec {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.479912] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Acquiring lock "80fc9add-683b-424e-9876-cdcae664e2da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.480843] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Lock "80fc9add-683b-424e-9876-cdcae664e2da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.480843] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Lock "80fc9add-683b-424e-9876-cdcae664e2da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.480843] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] No waiting events found dispatching network-vif-plugged-d0780c40-8a68-4d93-938c-96312b4436ec {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 842.480843] env[69927]: WARNING nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Received unexpected event network-vif-plugged-d0780c40-8a68-4d93-938c-96312b4436ec for instance with vm_state building and task_state spawning. [ 842.481058] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Received event network-changed-d0780c40-8a68-4d93-938c-96312b4436ec {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.481255] env[69927]: DEBUG nova.compute.manager [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Refreshing instance network info cache due to event network-changed-d0780c40-8a68-4d93-938c-96312b4436ec. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 842.481476] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Acquiring lock "refresh_cache-80fc9add-683b-424e-9876-cdcae664e2da" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.481633] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Acquired lock "refresh_cache-80fc9add-683b-424e-9876-cdcae664e2da" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.481947] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Refreshing network info cache for port d0780c40-8a68-4d93-938c-96312b4436ec {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.483864] env[69927]: DEBUG oslo_concurrency.lockutils [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] Acquired lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.483864] env[69927]: DEBUG nova.network.neutron [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Refreshing network info cache for port efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.521087] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.296s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.524861] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.297s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.525676] env[69927]: INFO nova.compute.claims [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.580942] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095860, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.584891] env[69927]: INFO nova.scheduler.client.report [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Deleted allocations for instance cde9885b-1aa8-411d-847e-087fe375002b [ 842.615387] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Releasing lock "refresh_cache-8de4160d-2282-4ed3-bdf0-349445a6eab8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.617208] env[69927]: DEBUG nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Instance network_info: |[{"id": "3da71ace-83d3-4b37-b02c-724f53d7f8bf", "address": "fa:16:3e:14:81:fa", "network": {"id": "53de9e43-eb8a-489d-9cc9-7838d268534d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1768040524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d4d694901e4d8188e06ccec88b4a1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3da71ace-83", "ovs_interfaceid": "3da71ace-83d3-4b37-b02c-724f53d7f8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.617208] env[69927]: DEBUG oslo_concurrency.lockutils [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] Acquired lock "refresh_cache-8de4160d-2282-4ed3-bdf0-349445a6eab8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.617420] env[69927]: DEBUG nova.network.neutron [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Refreshing network info cache for port 3da71ace-83d3-4b37-b02c-724f53d7f8bf {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.617513] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:81:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '3da71ace-83d3-4b37-b02c-724f53d7f8bf', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.628028] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Creating folder: Project (53d4d694901e4d8188e06ccec88b4a1a). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.633434] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4bd2e4f-55f0-4986-a368-b7920a57e592 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.647566] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Created folder: Project (53d4d694901e4d8188e06ccec88b4a1a) in parent group-v811283. [ 842.647789] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Creating folder: Instances. Parent ref: group-v811421. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.648067] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d40c144-d8b6-444f-a40a-e9939b22545d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.661750] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Created folder: Instances in parent group-v811421. [ 842.661750] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.661750] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.661750] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b908ff5-c7cc-4f30-9fb0-ca7ef3bde348 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.689236] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.689236] env[69927]: value = "task-4095864" [ 842.689236] env[69927]: _type = "Task" [ 842.689236] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.699742] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095864, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.892831] env[69927]: DEBUG oslo_vmware.api [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4095861, 'name': PowerOnVM_Task, 'duration_secs': 0.701619} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.893414] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.893476] env[69927]: INFO nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Took 9.38 seconds to spawn the instance on the hypervisor. [ 842.893730] env[69927]: DEBUG nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 842.894627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f783f55-a5a3-478e-9dad-42d9dc2a31fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.083145] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095860, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.099181] env[69927]: DEBUG oslo_concurrency.lockutils [None req-adfe37e0-81fd-4891-a7ae-8fb3171f84d4 tempest-ServerGroupTestJSON-635802753 tempest-ServerGroupTestJSON-635802753-project-member] Lock "cde9885b-1aa8-411d-847e-087fe375002b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.531s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.200857] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095864, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.430838] env[69927]: INFO nova.compute.manager [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Took 53.35 seconds to build instance. [ 843.436611] env[69927]: DEBUG nova.network.neutron [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updated VIF entry in instance network info cache for port efc77bd5-a980-4a4e-9211-70184239a8ee. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.437095] env[69927]: DEBUG nova.network.neutron [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updating instance_info_cache with network_info: [{"id": "efc77bd5-a980-4a4e-9211-70184239a8ee", "address": "fa:16:3e:a4:62:f3", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc77bd5-a9", "ovs_interfaceid": "efc77bd5-a980-4a4e-9211-70184239a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.449149] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Updated VIF entry in instance network info cache for port d0780c40-8a68-4d93-938c-96312b4436ec. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.449517] env[69927]: DEBUG nova.network.neutron [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Updating instance_info_cache with network_info: [{"id": "d0780c40-8a68-4d93-938c-96312b4436ec", "address": "fa:16:3e:a8:2b:33", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0780c40-8a", "ovs_interfaceid": "d0780c40-8a68-4d93-938c-96312b4436ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.584917] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095860, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.86375} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.589504] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 80fc9add-683b-424e-9876-cdcae664e2da/80fc9add-683b-424e-9876-cdcae664e2da.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.589718] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.590275] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa7cbe1c-6265-44e4-aefa-b982ffce94a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.598614] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 843.598614] env[69927]: value = "task-4095865" [ 843.598614] env[69927]: _type = "Task" [ 843.598614] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.612621] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095865, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.649998] env[69927]: DEBUG nova.network.neutron [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Updated VIF entry in instance network info cache for port 3da71ace-83d3-4b37-b02c-724f53d7f8bf. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.650408] env[69927]: DEBUG nova.network.neutron [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Updating instance_info_cache with network_info: [{"id": "3da71ace-83d3-4b37-b02c-724f53d7f8bf", "address": "fa:16:3e:14:81:fa", "network": {"id": "53de9e43-eb8a-489d-9cc9-7838d268534d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1768040524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d4d694901e4d8188e06ccec88b4a1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3da71ace-83", "ovs_interfaceid": "3da71ace-83d3-4b37-b02c-724f53d7f8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.701874] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095864, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.934970] env[69927]: DEBUG oslo_concurrency.lockutils [None req-04fa74e7-e254-4572-a7ca-1f493dd899bb tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.557s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.942313] env[69927]: DEBUG oslo_concurrency.lockutils [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] Releasing lock "refresh_cache-e0bca101-cf8d-48e1-a331-b0018548593e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.942596] env[69927]: DEBUG nova.compute.manager [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 843.942755] env[69927]: DEBUG nova.compute.manager [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing instance network info cache due to event network-changed-5615148b-36c9-40b6-9282-76bdcfb9931e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 843.942969] env[69927]: DEBUG oslo_concurrency.lockutils [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] Acquiring lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.943119] env[69927]: DEBUG oslo_concurrency.lockutils [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] Acquired lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.943279] env[69927]: DEBUG nova.network.neutron [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Refreshing network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 843.955395] env[69927]: DEBUG oslo_concurrency.lockutils [req-a51778ea-eac6-46ba-b511-7ac1d3f8e1af req-30157aab-9d09-4664-b4b4-30c098891216 service nova] Releasing lock "refresh_cache-80fc9add-683b-424e-9876-cdcae664e2da" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.111389] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095865, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.286091} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.112213] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.113010] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8827ab88-9f7b-4b9f-b450-15cdc5559b1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.116607] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9685e273-e026-4167-b76f-256616375c82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.136374] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba49b5c-2cd4-4872-ba77-790cb09036b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.148690] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 80fc9add-683b-424e-9876-cdcae664e2da/80fc9add-683b-424e-9876-cdcae664e2da.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.149465] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-917ab917-2371-4277-b5b0-630eb64c9295 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.166189] env[69927]: DEBUG oslo_concurrency.lockutils [req-c8b9af39-d97f-4696-8cc1-1a42027d9807 req-0311b784-2524-4da5-ba6d-ca3aecc632e2 service nova] Releasing lock "refresh_cache-8de4160d-2282-4ed3-bdf0-349445a6eab8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.206737] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e03c573-661d-4842-8571-e09c360104ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.210612] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 844.210612] env[69927]: value = "task-4095866" [ 844.210612] env[69927]: _type = "Task" [ 844.210612] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.221610] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095864, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.223515] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cc1693-9b26-41d6-baec-96a5b78830c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.231624] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095866, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.242794] env[69927]: DEBUG nova.compute.provider_tree [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.438018] env[69927]: DEBUG nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 844.525509] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.525509] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.714022] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095864, 'name': CreateVM_Task, 'duration_secs': 1.725614} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.715022] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.718021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.718021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.718021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 844.718021] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19b66994-4136-43bd-ac7e-10bdf01cdfad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.728026] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095866, 'name': ReconfigVM_Task, 'duration_secs': 0.393146} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.728026] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 80fc9add-683b-424e-9876-cdcae664e2da/80fc9add-683b-424e-9876-cdcae664e2da.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.728864] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-798d1b8b-a98a-4fa1-977f-43e05d1afbb0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.732445] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 844.732445] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52542e46-c64d-1825-e621-3c61c1fd0748" [ 844.732445] env[69927]: _type = "Task" [ 844.732445] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.740582] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 844.740582] env[69927]: value = "task-4095867" [ 844.740582] env[69927]: _type = "Task" [ 844.740582] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.750466] env[69927]: DEBUG nova.scheduler.client.report [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.754345] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52542e46-c64d-1825-e621-3c61c1fd0748, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.765116] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095867, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.806571] env[69927]: DEBUG nova.compute.manager [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Received event network-changed-b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.806765] env[69927]: DEBUG nova.compute.manager [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Refreshing instance network info cache due to event network-changed-b347a042-35ea-41f5-a96e-84e4553f55d2. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 844.806982] env[69927]: DEBUG oslo_concurrency.lockutils [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] Acquiring lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.807623] env[69927]: DEBUG oslo_concurrency.lockutils [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] Acquired lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.807788] env[69927]: DEBUG nova.network.neutron [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Refreshing network info cache for port b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.812450] env[69927]: DEBUG nova.network.neutron [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updated VIF entry in instance network info cache for port 5615148b-36c9-40b6-9282-76bdcfb9931e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.812819] env[69927]: DEBUG nova.network.neutron [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [{"id": "5615148b-36c9-40b6-9282-76bdcfb9931e", "address": "fa:16:3e:91:a6:13", "network": {"id": "6ec78f5d-4de4-4f02-a771-97092e489acc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1456290359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28bd5c5feee4a39b76694d57eb3aaf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615148b-36", "ovs_interfaceid": "5615148b-36c9-40b6-9282-76bdcfb9931e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.974492] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.248736] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 
tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52542e46-c64d-1825-e621-3c61c1fd0748, 'name': SearchDatastore_Task, 'duration_secs': 0.012514} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.249267] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.249374] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.249713] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.250301] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.250424] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.251390] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a0e57b6-0167-48dc-a4c2-aa2f365719a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.257424] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095867, 'name': Rename_Task, 'duration_secs': 0.319958} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.258298] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.258642] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00a8d095-bcc1-4450-8406-27091ae99f94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.261072] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.738s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.263587] env[69927]: DEBUG nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 845.271032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.449s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.271032] env[69927]: INFO nova.compute.claims [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.273581] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.273926] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.275169] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3314b689-de8d-4dad-a885-d6653adb165a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.281212] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 845.281212] env[69927]: value = "task-4095868" [ 845.281212] env[69927]: _type = "Task" [ 845.281212] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.287848] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 845.287848] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a5fcc-152b-8f64-ba23-43e22e04e5d8" [ 845.287848] env[69927]: _type = "Task" [ 845.287848] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.311027] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095868, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.324579] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a5fcc-152b-8f64-ba23-43e22e04e5d8, 'name': SearchDatastore_Task, 'duration_secs': 0.01188} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.325194] env[69927]: DEBUG oslo_concurrency.lockutils [req-6e38d3b4-3d13-4711-97db-dad63828c26a req-4c9e3d5d-a2f3-4e39-81b7-adcebd768a3c service nova] Releasing lock "refresh_cache-a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.327229] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a6e4224-c4f4-4a94-b50a-fadc7bead0bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.336265] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 845.336265] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5258d54b-f7d3-5057-5a4d-1a9a1a1c8315" [ 845.336265] env[69927]: _type = "Task" [ 845.336265] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.347687] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5258d54b-f7d3-5057-5a4d-1a9a1a1c8315, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.776809] env[69927]: DEBUG nova.compute.utils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 845.781682] env[69927]: DEBUG nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 845.781857] env[69927]: DEBUG nova.network.neutron [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 845.801019] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095868, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.850736] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5258d54b-f7d3-5057-5a4d-1a9a1a1c8315, 'name': SearchDatastore_Task, 'duration_secs': 0.012318} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.851205] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.851529] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8de4160d-2282-4ed3-bdf0-349445a6eab8/8de4160d-2282-4ed3-bdf0-349445a6eab8.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 845.851988] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f9e258e-6129-4489-af08-b312208aa874 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.859343] env[69927]: DEBUG nova.policy [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bae805d651248408735b2279fe945a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2079784fef244b8883f56f9cfeaf2a6d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 845.860157] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 845.860157] env[69927]: value = "task-4095869" [ 845.860157] env[69927]: _type = "Task" [ 845.860157] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.871210] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095869, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.994625] env[69927]: DEBUG nova.network.neutron [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updated VIF entry in instance network info cache for port b347a042-35ea-41f5-a96e-84e4553f55d2. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.995092] env[69927]: DEBUG nova.network.neutron [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updating instance_info_cache with network_info: [{"id": "b347a042-35ea-41f5-a96e-84e4553f55d2", "address": "fa:16:3e:10:1a:38", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb347a042-35", "ovs_interfaceid": "b347a042-35ea-41f5-a96e-84e4553f55d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.282914] env[69927]: DEBUG nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 846.318183] env[69927]: DEBUG oslo_vmware.api [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095868, 'name': PowerOnVM_Task, 'duration_secs': 0.878462} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.318183] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.318183] env[69927]: INFO nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Took 9.02 seconds to spawn the instance on the hypervisor. 
[ 846.318183] env[69927]: DEBUG nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 846.319550] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ad6c87-9e5f-416c-8649-1986b73635b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.334553] env[69927]: DEBUG nova.network.neutron [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Successfully created port: d61e5620-93c1-42e0-b372-c977dbc31d1c {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.378179] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095869, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.499410] env[69927]: DEBUG oslo_concurrency.lockutils [req-72f6f666-91f9-4cfc-8f95-f8544752e8ed req-9fb0c7d5-8d38-4348-a1d7-0c556de5eb3c service nova] Releasing lock "refresh_cache-e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.848714] env[69927]: INFO nova.compute.manager [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Took 44.76 seconds to build instance. [ 846.873750] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095869, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.753578} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.874709] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8de4160d-2282-4ed3-bdf0-349445a6eab8/8de4160d-2282-4ed3-bdf0-349445a6eab8.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 846.874965] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 846.875256] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bebac244-29bc-45e5-9814-b4ecc6176152 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.883962] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 846.883962] env[69927]: value = "task-4095870" [ 846.883962] env[69927]: _type = "Task" [ 846.883962] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.894496] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095870, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.949565] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89848421-139b-4982-bdcd-e5d44835f6fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.959237] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71b8c4c-a2f9-4bac-81c6-2f9a30a07f65 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.012461] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4950de53-4880-4105-8e15-9119f087a6c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.018281] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f139fa27-e111-4c09-b8f0-ad33eef67ac9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.034401] env[69927]: DEBUG nova.compute.provider_tree [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.304060] env[69927]: DEBUG nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 847.328594] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 847.328850] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.329014] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 847.329204] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.329348] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 847.329495] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 847.329762] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 847.331511] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 847.331511] env[69927]: DEBUG nova.virt.hardware [None 
req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 847.331511] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 847.331511] env[69927]: DEBUG nova.virt.hardware [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 847.331511] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc81d9eb-1569-4733-9712-e75d0dbfccdc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.340588] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2126e727-69fc-494b-81e8-9e9598c68422 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.356128] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4b821e53-a0eb-445b-83ae-11a479d177ac tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "80fc9add-683b-424e-9876-cdcae664e2da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.126s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.394822] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095870, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184027} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.398200] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.398200] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f541a27-e755-4290-bc22-a70d990151d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.419507] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 8de4160d-2282-4ed3-bdf0-349445a6eab8/8de4160d-2282-4ed3-bdf0-349445a6eab8.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.420307] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81bbbadf-2740-41fc-bf12-518405ac2402 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.441912] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 847.441912] env[69927]: value = "task-4095871" [ 847.441912] env[69927]: _type = "Task" [ 847.441912] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.457181] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095871, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.543662] env[69927]: DEBUG nova.scheduler.client.report [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.628709] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497f71db-0dbe-4388-9429-f07b396fbb11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.642516] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c14a6d10-ed98-4e06-9625-d5ddc5455ce4 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Suspending the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 847.643839] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8343b6cb-4613-46a4-8782-6a294f46e135 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.654135] env[69927]: DEBUG oslo_vmware.api [None req-c14a6d10-ed98-4e06-9625-d5ddc5455ce4 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 847.654135] env[69927]: value = "task-4095872" [ 847.654135] env[69927]: _type = "Task" [ 847.654135] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.668124] env[69927]: DEBUG oslo_vmware.api [None req-c14a6d10-ed98-4e06-9625-d5ddc5455ce4 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095872, 'name': SuspendVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.861877] env[69927]: DEBUG nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.958039] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095871, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.049990] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.782s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.050835] env[69927]: DEBUG nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 848.056604] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.774s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.056947] env[69927]: DEBUG nova.objects.instance [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lazy-loading 'resources' on Instance uuid 915797c5-6f68-4355-a6b0-ad2b06b826cb {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.168411] env[69927]: DEBUG oslo_vmware.api [None req-c14a6d10-ed98-4e06-9625-d5ddc5455ce4 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095872, 'name': SuspendVM_Task} progress is 45%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.189780] env[69927]: DEBUG nova.network.neutron [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Successfully updated port: d61e5620-93c1-42e0-b372-c977dbc31d1c {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.224224] env[69927]: DEBUG nova.compute.manager [req-a04c0f20-e655-406f-82e6-64d51e9f3a0a req-a3d6246f-dfb4-4879-a178-2ce3fb148391 service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Received event network-vif-plugged-d61e5620-93c1-42e0-b372-c977dbc31d1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.224224] env[69927]: DEBUG oslo_concurrency.lockutils [req-a04c0f20-e655-406f-82e6-64d51e9f3a0a req-a3d6246f-dfb4-4879-a178-2ce3fb148391 service nova] Acquiring lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.225558] env[69927]: DEBUG oslo_concurrency.lockutils [req-a04c0f20-e655-406f-82e6-64d51e9f3a0a req-a3d6246f-dfb4-4879-a178-2ce3fb148391 service nova] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.225876] env[69927]: DEBUG oslo_concurrency.lockutils [req-a04c0f20-e655-406f-82e6-64d51e9f3a0a req-a3d6246f-dfb4-4879-a178-2ce3fb148391 service nova] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.226147] env[69927]: DEBUG nova.compute.manager [req-a04c0f20-e655-406f-82e6-64d51e9f3a0a req-a3d6246f-dfb4-4879-a178-2ce3fb148391 service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] No waiting events found dispatching network-vif-plugged-d61e5620-93c1-42e0-b372-c977dbc31d1c {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 848.226333] env[69927]: WARNING nova.compute.manager [req-a04c0f20-e655-406f-82e6-64d51e9f3a0a req-a3d6246f-dfb4-4879-a178-2ce3fb148391 service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Received unexpected event network-vif-plugged-d61e5620-93c1-42e0-b372-c977dbc31d1c for instance with vm_state building and task_state spawning. 
[ 848.395162] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.398880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "e1946033-4ec3-4561-afdf-a3b748f7c611" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.399546] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.454354] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095871, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.561008] env[69927]: DEBUG nova.compute.utils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 848.562747] env[69927]: DEBUG nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 848.562992] env[69927]: DEBUG nova.network.neutron [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 848.644312] env[69927]: DEBUG nova.policy [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44343ce64a174585ac19f26149f9a480', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e5c81a6491a4090b807b7328df7d8ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 848.668921] env[69927]: DEBUG oslo_vmware.api [None req-c14a6d10-ed98-4e06-9625-d5ddc5455ce4 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095872, 'name': SuspendVM_Task, 'duration_secs': 0.808805} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.669170] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c14a6d10-ed98-4e06-9625-d5ddc5455ce4 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Suspended the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 848.669401] env[69927]: DEBUG nova.compute.manager [None req-c14a6d10-ed98-4e06-9625-d5ddc5455ce4 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 848.670208] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b6719f-1c86-4fef-807c-50190c80f2e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.690549] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "refresh_cache-c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.690695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquired lock "refresh_cache-c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.691170] env[69927]: DEBUG nova.network.neutron [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 848.964037] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095871, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.071118] env[69927]: DEBUG nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 849.075064] env[69927]: DEBUG nova.network.neutron [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Successfully created port: ff1455ee-4ab2-4e4d-ac72-6a6554002936 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.083867] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.084202] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.171256] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e427d20a-5346-4369-af90-4e6765a773d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.189265] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593de1c0-0051-40a2-8f36-66f86c937944 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.229622] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17ff95e-0061-4d48-9c9e-21c59eb632c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.239318] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50623b2a-7e91-4d23-a963-7a201f011625 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.253881] env[69927]: DEBUG nova.compute.provider_tree [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
849.255993] env[69927]: DEBUG nova.network.neutron [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.459189] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095871, 'name': ReconfigVM_Task, 'duration_secs': 1.633815} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.461855] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 8de4160d-2282-4ed3-bdf0-349445a6eab8/8de4160d-2282-4ed3-bdf0-349445a6eab8.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.464427] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e0beb64-224d-4b81-9c5a-7f0f9401f3a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.466948] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "9363c664-5848-408b-9b03-2dea4ceded90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.467423] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "9363c664-5848-408b-9b03-2dea4ceded90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.469174] env[69927]: DEBUG nova.network.neutron [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Updating instance_info_cache with network_info: [{"id": "d61e5620-93c1-42e0-b372-c977dbc31d1c", "address": "fa:16:3e:e2:d2:f2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61e5620-93", "ovs_interfaceid": "d61e5620-93c1-42e0-b372-c977dbc31d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.479039] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 849.479039] env[69927]: value = "task-4095873" [ 849.479039] env[69927]: _type = "Task" [ 849.479039] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.489736] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095873, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.587311] env[69927]: INFO nova.compute.manager [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Detaching volume b38117da-190d-46b9-8928-95fc2ddfa1bc [ 849.640265] env[69927]: INFO nova.virt.block_device [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Attempting to driver detach volume b38117da-190d-46b9-8928-95fc2ddfa1bc from mountpoint /dev/sdb [ 849.640886] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 849.640886] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811397', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'name': 'volume-b38117da-190d-46b9-8928-95fc2ddfa1bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f6972b90-7746-4a37-8be8-1739f96dc3dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'serial': 'b38117da-190d-46b9-8928-95fc2ddfa1bc'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 849.641718] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdc74e9-00ef-4b50-a85f-c957dfcbb5e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.665609] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86f319d-a528-45d2-8256-b1b260d36e16 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.674137] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ec08cb-f7f8-46fe-8b41-4e7ce873532e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.695958] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b9e80d-d6f1-485f-81a4-687e32f1e378 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.715205] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] The volume has not been displaced from its original location: [datastore1] volume-b38117da-190d-46b9-8928-95fc2ddfa1bc/volume-b38117da-190d-46b9-8928-95fc2ddfa1bc.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 849.720686] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Reconfiguring VM instance instance-0000001a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 849.721104] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54eb494f-b10a-4b80-9988-bea2f4091eb8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.740230] env[69927]: DEBUG oslo_vmware.api [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 849.740230] env[69927]: value = "task-4095874" [ 849.740230] env[69927]: _type = "Task" [ 849.740230] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.752253] env[69927]: DEBUG oslo_vmware.api [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095874, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.759568] env[69927]: DEBUG nova.scheduler.client.report [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.973079] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Releasing lock "refresh_cache-c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.973501] env[69927]: DEBUG nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Instance network_info: |[{"id": "d61e5620-93c1-42e0-b372-c977dbc31d1c", "address": "fa:16:3e:e2:d2:f2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": 
"cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61e5620-93", "ovs_interfaceid": "d61e5620-93c1-42e0-b372-c977dbc31d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 849.973953] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:d2:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd61e5620-93c1-42e0-b372-c977dbc31d1c', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.986939] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Creating folder: Project (2079784fef244b8883f56f9cfeaf2a6d). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.986939] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46974fa6-2bfa-47dd-8ca7-4f37bf09adb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.998826] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095873, 'name': Rename_Task, 'duration_secs': 0.362163} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.999297] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.999632] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ec9017d-a0f5-4262-96d0-5fd6a1676e11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.003299] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Created folder: Project (2079784fef244b8883f56f9cfeaf2a6d) in parent group-v811283. [ 850.006089] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Creating folder: Instances. 
Parent ref: group-v811424. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 850.006554] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d95c4437-bb07-4ce0-ae0a-21e8bb4c6797 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.011319] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 850.011319] env[69927]: value = "task-4095876" [ 850.011319] env[69927]: _type = "Task" [ 850.011319] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.020403] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Created folder: Instances in parent group-v811424. [ 850.020664] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 850.021320] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 850.021584] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c70eb51-d8c6-4c12-a7e8-bf270972a372 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.046034] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095876, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.053951] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.053951] env[69927]: value = "task-4095878" [ 850.053951] env[69927]: _type = "Task" [ 850.053951] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.064883] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095878, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.084214] env[69927]: DEBUG nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 850.121020] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 850.121020] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.121020] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.121270] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.121270] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.121387] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 850.121600] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 850.122840] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 850.122840] env[69927]: DEBUG nova.virt.hardware [None 
req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 850.122840] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 850.122840] env[69927]: DEBUG nova.virt.hardware [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 850.125313] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8dbe86-8ec0-4684-a0c3-2ecf06e32843 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.133909] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8ac07f-f672-47e5-8314-5d1726d9cc94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.252138] env[69927]: DEBUG oslo_vmware.api [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095874, 'name': ReconfigVM_Task, 'duration_secs': 0.252101} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.252464] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Reconfigured VM instance instance-0000001a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 850.258081] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04307def-2386-4be5-b93f-92fe31cda821 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.272798] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.216s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.275244] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.838s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.276092] env[69927]: DEBUG nova.objects.instance [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lazy-loading 'resources' on Instance uuid 0e6e60e7-d623-44da-912e-804da4d616c9 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.285286] env[69927]: DEBUG oslo_vmware.api [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 850.285286] env[69927]: value = "task-4095879" [ 850.285286] env[69927]: _type = "Task" [ 850.285286] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.288077] env[69927]: DEBUG nova.compute.manager [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Received event network-changed-d61e5620-93c1-42e0-b372-c977dbc31d1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.288338] env[69927]: DEBUG nova.compute.manager [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Refreshing instance network info cache due to event network-changed-d61e5620-93c1-42e0-b372-c977dbc31d1c. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 850.289495] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] Acquiring lock "refresh_cache-c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.289495] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] Acquired lock "refresh_cache-c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.289495] env[69927]: DEBUG nova.network.neutron [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Refreshing network info cache for port d61e5620-93c1-42e0-b372-c977dbc31d1c {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.303252] env[69927]: DEBUG oslo_vmware.api [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095879, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.310243] env[69927]: INFO nova.scheduler.client.report [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted allocations for instance 915797c5-6f68-4355-a6b0-ad2b06b826cb [ 850.525040] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095876, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.571195] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095878, 'name': CreateVM_Task, 'duration_secs': 0.411319} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.571522] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 850.575441] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.575441] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.575441] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 850.575441] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cc4a885-1da3-4999-86bc-3595182d62b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.586024] env[69927]: DEBUG nova.compute.manager [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 850.586024] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 850.586024] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52df4a17-11e3-6370-87f1-7595bc6f4d51" [ 850.586024] env[69927]: _type = "Task" [ 850.586024] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.586024] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2307f122-d8b5-4001-af9c-112f229d67f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.603326] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52df4a17-11e3-6370-87f1-7595bc6f4d51, 'name': SearchDatastore_Task, 'duration_secs': 0.012652} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.607461] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.609068] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.609068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.609068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.609068] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.609658] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26c2d415-5c95-42e9-9b04-f2dfe80f0a7a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.624300] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.624300] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.624300] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-caffb69b-3c8c-49e9-a5d5-19f9f55d1243 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.630957] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 850.630957] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52401a8c-76ad-0211-742e-1dff93ff6d64" [ 850.630957] env[69927]: _type = "Task" [ 850.630957] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.643317] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52401a8c-76ad-0211-742e-1dff93ff6d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.801635] env[69927]: DEBUG oslo_vmware.api [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095879, 'name': ReconfigVM_Task, 'duration_secs': 0.182102} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.803321] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811397', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'name': 'volume-b38117da-190d-46b9-8928-95fc2ddfa1bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f6972b90-7746-4a37-8be8-1739f96dc3dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'b38117da-190d-46b9-8928-95fc2ddfa1bc', 'serial': 'b38117da-190d-46b9-8928-95fc2ddfa1bc'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 850.823485] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30d82a6d-aadd-4127-a20b-06e5ad238918 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "915797c5-6f68-4355-a6b0-ad2b06b826cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.041s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.022916] env[69927]: DEBUG oslo_vmware.api [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095876, 'name': PowerOnVM_Task, 'duration_secs': 0.605054} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.023703] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.023703] env[69927]: INFO nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Took 11.04 seconds to spawn the instance on the hypervisor. [ 851.023703] env[69927]: DEBUG nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 851.025538] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0999640-cb7a-44e4-9cc0-63923b4102b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.108461] env[69927]: INFO nova.compute.manager [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] instance snapshotting [ 851.108627] env[69927]: WARNING nova.compute.manager [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 851.112107] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c85ea13-4f7d-4f81-8547-6d70066c4e63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.146067] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162f538d-7bb4-4214-98a4-3c9ac767c5de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.172636] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52401a8c-76ad-0211-742e-1dff93ff6d64, 'name': SearchDatastore_Task, 'duration_secs': 0.011191} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.174607] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3d9c057-621b-4035-95da-d6c5a7eab7af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.182919] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 851.182919] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5231b900-8784-55d9-3364-49dd53f12fb9" [ 851.182919] env[69927]: _type = "Task" [ 851.182919] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.199502] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5231b900-8784-55d9-3364-49dd53f12fb9, 'name': SearchDatastore_Task, 'duration_secs': 0.013154} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.199933] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.200349] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c7451ca3-f1fc-469b-b9d2-7fe24cb8949e/c7451ca3-f1fc-469b-b9d2-7fe24cb8949e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.200730] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea3bdc03-17e1-4efd-be45-ab4e087e4582 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.210346] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 851.210346] env[69927]: value = "task-4095880" [ 851.210346] env[69927]: _type = "Task" [ 851.210346] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.221962] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095880, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.236347] env[69927]: DEBUG nova.network.neutron [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Updated VIF entry in instance network info cache for port d61e5620-93c1-42e0-b372-c977dbc31d1c. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.236347] env[69927]: DEBUG nova.network.neutron [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Updating instance_info_cache with network_info: [{"id": "d61e5620-93c1-42e0-b372-c977dbc31d1c", "address": "fa:16:3e:e2:d2:f2", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61e5620-93", "ovs_interfaceid": "d61e5620-93c1-42e0-b372-c977dbc31d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.262370] env[69927]: DEBUG nova.network.neutron [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Successfully updated port: ff1455ee-4ab2-4e4d-ac72-6a6554002936 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.390638] env[69927]: DEBUG nova.objects.instance [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.477980] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585ed9ee-0d7a-4ed5-a773-52b9ec768c83 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.486719] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa7e4f9-34c8-4326-b01d-3ad6b135ce8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.519578] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7ad0b3-5c88-4992-9f84-2a0766410742 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.528940] env[69927]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967421a7-c7ad-41c8-84ba-19388564894c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.551874] env[69927]: DEBUG nova.compute.provider_tree [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.554886] env[69927]: INFO nova.compute.manager [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Took 48.40 seconds to build instance. [ 851.674637] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 851.674996] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4312ed05-8132-41ea-9809-b09461155c99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.685668] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 851.685668] env[69927]: value = "task-4095881" [ 851.685668] env[69927]: _type = "Task" [ 851.685668] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.696147] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095881, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.722459] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095880, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.740536] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f0cfabc-f6b0-4377-bdec-b0139b3ea6e3 req-2bb9716a-bf0e-417b-b114-ac06d292ca7a service nova] Releasing lock "refresh_cache-c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.767450] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.767450] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.767450] env[69927]: DEBUG nova.network.neutron [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.057169] env[69927]: DEBUG nova.scheduler.client.report [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.060812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8f50dc13-917b-4b8b-81ef-6615620e1989 tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.398s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.198572] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095881, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.223809] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095880, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557417} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.224163] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c7451ca3-f1fc-469b-b9d2-7fe24cb8949e/c7451ca3-f1fc-469b-b9d2-7fe24cb8949e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.224600] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.224851] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60becca7-65a5-4ae4-8e4b-5078aaff96d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.235224] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 852.235224] env[69927]: value = "task-4095882" [ 852.235224] env[69927]: _type = "Task" [ 852.235224] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.244365] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095882, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.325509] env[69927]: DEBUG nova.network.neutron [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.361579] env[69927]: DEBUG nova.compute.manager [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Received event network-vif-plugged-ff1455ee-4ab2-4e4d-ac72-6a6554002936 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.361811] env[69927]: DEBUG oslo_concurrency.lockutils [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] Acquiring lock "6be47dcb-ce00-4b81-9e69-35acabac046e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.362049] env[69927]: DEBUG oslo_concurrency.lockutils [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.364222] env[69927]: DEBUG oslo_concurrency.lockutils [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.364498] env[69927]: DEBUG nova.compute.manager [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] No waiting events found dispatching network-vif-plugged-ff1455ee-4ab2-4e4d-ac72-6a6554002936 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 852.364696] env[69927]: WARNING nova.compute.manager [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Received unexpected event network-vif-plugged-ff1455ee-4ab2-4e4d-ac72-6a6554002936 for instance with vm_state building and task_state spawning. [ 852.364913] env[69927]: DEBUG nova.compute.manager [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Received event network-changed-ff1455ee-4ab2-4e4d-ac72-6a6554002936 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.365093] env[69927]: DEBUG nova.compute.manager [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Refreshing instance network info cache due to event network-changed-ff1455ee-4ab2-4e4d-ac72-6a6554002936. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 852.365305] env[69927]: DEBUG oslo_concurrency.lockutils [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] Acquiring lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.399045] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c3aab598-8503-474a-b670-d119f52f7579 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.315s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.518536] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.519350] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.520893] env[69927]: DEBUG nova.compute.manager [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 852.522295] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c4943c-5f2f-4127-9095-994ba118a45d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.532537] env[69927]: DEBUG nova.compute.manager [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 852.533846] env[69927]: DEBUG nova.objects.instance [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 852.536991] env[69927]: DEBUG nova.network.neutron [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Updating instance_info_cache with network_info: [{"id": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "address": "fa:16:3e:06:ea:cd", "network": {"id": 
"f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1455ee-4a", "ovs_interfaceid": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.564021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.287s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.565162] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.324s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.568904] env[69927]: INFO nova.compute.claims [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.572240] env[69927]: DEBUG nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 852.614637] env[69927]: INFO nova.scheduler.client.report [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted allocations for instance 0e6e60e7-d623-44da-912e-804da4d616c9 [ 852.710249] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095881, 'name': CreateSnapshot_Task, 'duration_secs': 0.572698} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.710552] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 852.711632] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a5df4e-4367-487a-8816-880e92844d3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.745503] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095882, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108214} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.745803] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.746951] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c66b48d-2824-4138-aaba-d90c25afada1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.772431] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] c7451ca3-f1fc-469b-b9d2-7fe24cb8949e/c7451ca3-f1fc-469b-b9d2-7fe24cb8949e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.773381] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d764947-0df7-4b54-b945-1a51d8418444 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.795579] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 852.795579] env[69927]: value = "task-4095883" [ 852.795579] env[69927]: _type = "Task" [ 852.795579] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.806501] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095883, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.041776] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.043411] env[69927]: DEBUG nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Instance network_info: |[{"id": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "address": "fa:16:3e:06:ea:cd", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1455ee-4a", "ovs_interfaceid": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 853.043411] env[69927]: DEBUG oslo_concurrency.lockutils [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] Acquired lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.043616] env[69927]: DEBUG nova.network.neutron [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Refreshing network info cache for port ff1455ee-4ab2-4e4d-ac72-6a6554002936 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.044660] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:ea:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d650b26-c3e7-4de7-98db-5e4b816d123a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff1455ee-4ab2-4e4d-ac72-6a6554002936', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.052497] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 853.054182] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.055485] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fd06e89-07d1-4daa-8723-107f57c52c0a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.075975] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.080193] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f82e812-efaa-412a-aa5f-789e1e98a8ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.095241] env[69927]: DEBUG oslo_vmware.api [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 853.095241] env[69927]: value = "task-4095885" [ 853.095241] env[69927]: _type = "Task" [ 853.095241] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.095602] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.095602] env[69927]: value = "task-4095884" [ 853.095602] env[69927]: _type = "Task" [ 853.095602] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.108973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.118375] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095884, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.125589] env[69927]: DEBUG oslo_vmware.api [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095885, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.126262] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d73c346e-2e36-4bff-a5af-4aee29d7c15c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "0e6e60e7-d623-44da-912e-804da4d616c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.216s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.240427] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 853.240979] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-764fe215-7ab3-4b8c-9378-a019ba9ee945 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.253435] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 853.253435] env[69927]: value = "task-4095886" [ 853.253435] env[69927]: _type = "Task" [ 853.253435] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.265565] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095886, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.308934] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095883, 'name': ReconfigVM_Task, 'duration_secs': 0.35658} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.309338] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Reconfigured VM instance instance-0000002d to attach disk [datastore2] c7451ca3-f1fc-469b-b9d2-7fe24cb8949e/c7451ca3-f1fc-469b-b9d2-7fe24cb8949e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.310283] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a6ef5f0-bafc-4644-8de3-6a646faae29a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.320113] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 853.320113] env[69927]: value = "task-4095887" [ 853.320113] env[69927]: _type = "Task" [ 853.320113] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.332458] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095887, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.621110] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095884, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.631818] env[69927]: DEBUG oslo_vmware.api [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095885, 'name': PowerOffVM_Task, 'duration_secs': 0.249178} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.632423] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.632639] env[69927]: DEBUG nova.compute.manager [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 853.633535] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19840b85-bce6-479d-8568-c77b8b80f9ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.765660] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095886, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.839834] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095887, 'name': Rename_Task, 'duration_secs': 0.173089} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.843816] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.848470] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c64d0348-11aa-46d9-8b4c-c64afda5d4ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.856686] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 853.856686] env[69927]: value = "task-4095888" [ 853.856686] env[69927]: _type = "Task" [ 853.856686] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.870874] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095888, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.903803] env[69927]: DEBUG nova.network.neutron [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Updated VIF entry in instance network info cache for port ff1455ee-4ab2-4e4d-ac72-6a6554002936. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.903803] env[69927]: DEBUG nova.network.neutron [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Updating instance_info_cache with network_info: [{"id": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "address": "fa:16:3e:06:ea:cd", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1455ee-4a", "ovs_interfaceid": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.112527] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095884, 'name': CreateVM_Task, 'duration_secs': 0.727168} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.113033] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.117564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.117564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.117564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 854.117564] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20badda1-34bd-4fec-bd87-b039dfaacef7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.125721] 
env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 854.125721] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f890f0-d621-df53-a6a1-ec67459b376a" [ 854.125721] env[69927]: _type = "Task" [ 854.125721] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.136392] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f890f0-d621-df53-a6a1-ec67459b376a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.152357] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bfc012a6-b05d-40c9-8525-6eaadb957270 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.632s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.248941] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b5a0e4-5c72-43bc-9e32-baccac60b4a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.263315] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661f1f30-5371-4f46-9ca7-b6bed6f46bbb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.271975] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "50eedb80-d4bc-42c4-9686-6549cbd675b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.272322] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.277153] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095886, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.307301] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28171229-f30f-4544-9ef9-a47ce56c3f3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.317147] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d75b06-bcae-4788-9aa8-d0fc6f262fdf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.334468] env[69927]: DEBUG nova.compute.provider_tree [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.368494] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095888, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.409383] env[69927]: DEBUG oslo_concurrency.lockutils [req-6768c82d-19ef-4ee8-a453-8239c96bc6d9 req-c9ff0725-9907-4607-a955-b48ed481c036 service nova] Releasing lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.456526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.456797] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.491791] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "358ecaef-37f0-42be-acce-00f389650c97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.492135] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "358ecaef-37f0-42be-acce-00f389650c97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.637714] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f890f0-d621-df53-a6a1-ec67459b376a, 'name': SearchDatastore_Task, 'duration_secs': 0.01302} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.638089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.638348] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 854.638608] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.638773] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.638977] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.639284] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42bf0521-ba29-44a8-a621-d2e41e5803f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.650811] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.651192] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 854.652193] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba7a6b14-3b4b-4e2b-a3a0-811bdccbc505 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.664205] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 854.664205] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e5e3f4-b4cc-6f99-aff9-a0530c547890" [ 854.664205] env[69927]: _type = "Task" [ 854.664205] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.678255] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e5e3f4-b4cc-6f99-aff9-a0530c547890, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.768779] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095886, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.840939] env[69927]: DEBUG nova.scheduler.client.report [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.871474] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095888, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.163065] env[69927]: DEBUG nova.objects.instance [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.175197] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e5e3f4-b4cc-6f99-aff9-a0530c547890, 'name': SearchDatastore_Task, 'duration_secs': 0.013619} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.176084] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c825337f-8d8a-49ce-bdd0-24ecfc07e77e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.182537] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 855.182537] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5243bca1-11d8-5999-aa51-2c780200bae6" [ 855.182537] env[69927]: _type = "Task" [ 855.182537] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.192413] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5243bca1-11d8-5999-aa51-2c780200bae6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.227103] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "8de4160d-2282-4ed3-bdf0-349445a6eab8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.227394] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.227606] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "8de4160d-2282-4ed3-bdf0-349445a6eab8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.227778] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.227945] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.232916] env[69927]: INFO nova.compute.manager [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Terminating instance [ 855.270723] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095886, 'name': CloneVM_Task, 'duration_secs': 1.75418} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.271015] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Created linked-clone VM from snapshot [ 855.271815] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d20d39-79bd-468c-bd3b-cb0b61f3fb53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.280709] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Uploading image 3400d513-87f1-466e-b71f-d465ac6ba592 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 855.302899] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 855.302899] env[69927]: value = "vm-811429" [ 855.302899] env[69927]: _type = "VirtualMachine" [ 855.302899] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 855.303233] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8af7771a-2e91-4206-a18b-ca45788c4f16 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.311334] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease: (returnval){ [ 855.311334] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bfac02-88dc-883e-1315-fd1702d39b20" [ 855.311334] env[69927]: _type = "HttpNfcLease" [ 855.311334] env[69927]: } obtained for exporting VM: (result){ [ 855.311334] env[69927]: value = "vm-811429" [ 855.311334] env[69927]: _type = "VirtualMachine" [ 855.311334] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 855.311671] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the lease: (returnval){ [ 855.311671] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bfac02-88dc-883e-1315-fd1702d39b20" [ 855.311671] env[69927]: _type = "HttpNfcLease" [ 855.311671] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 855.319631] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.319631] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bfac02-88dc-883e-1315-fd1702d39b20" [ 855.319631] env[69927]: _type = "HttpNfcLease" [ 855.319631] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 855.347557] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.782s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.348139] env[69927]: DEBUG nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 855.350748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.290s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.350965] env[69927]: DEBUG nova.objects.instance [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lazy-loading 'resources' on Instance uuid 14359034-232d-478f-bf65-cf9937c59229 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.369006] env[69927]: DEBUG oslo_vmware.api [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095888, 'name': PowerOnVM_Task, 'duration_secs': 1.087913} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.369373] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.369591] env[69927]: INFO nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Took 8.07 seconds to spawn the instance on the hypervisor. [ 855.369785] env[69927]: DEBUG nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 855.370598] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5c77ce-e195-4c1c-a6eb-d5e12de643aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.670206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.670575] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.670895] env[69927]: DEBUG nova.network.neutron [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.671256] env[69927]: DEBUG nova.objects.instance [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'info_cache' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.694911] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5243bca1-11d8-5999-aa51-2c780200bae6, 'name': SearchDatastore_Task, 'duration_secs': 0.014688} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.695271] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.695538] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/6be47dcb-ce00-4b81-9e69-35acabac046e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 855.695879] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6df20337-8d4e-4b57-83e4-95234d6f19bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.703849] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 855.703849] env[69927]: value = "task-4095890" [ 855.703849] env[69927]: _type = "Task" [ 855.703849] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.714278] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095890, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.737131] env[69927]: DEBUG nova.compute.manager [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 855.737467] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.738444] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef59780b-e5d3-4dc6-8a38-fd53d11f5b07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.747825] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.747943] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3027cd8d-deac-49f2-b8b3-59d5a64d7c8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.756114] env[69927]: DEBUG oslo_vmware.api [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 855.756114] env[69927]: value = "task-4095891" [ 855.756114] env[69927]: _type = "Task" [ 855.756114] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.768109] env[69927]: DEBUG oslo_vmware.api [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.820893] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.820893] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bfac02-88dc-883e-1315-fd1702d39b20" [ 855.820893] env[69927]: _type = "HttpNfcLease" [ 855.820893] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 855.821529] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 855.821529] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bfac02-88dc-883e-1315-fd1702d39b20" [ 855.821529] env[69927]: _type = "HttpNfcLease" [ 855.821529] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 855.822217] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e429914-49bf-41cc-a5d2-aff0c6e9db25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.831532] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dc702a-089c-69f0-88b2-d4991f199d18/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 855.831723] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dc702a-089c-69f0-88b2-d4991f199d18/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 855.889178] env[69927]: DEBUG nova.compute.utils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 855.903342] env[69927]: DEBUG nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 855.903342] env[69927]: DEBUG nova.network.neutron [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 855.908919] env[69927]: INFO nova.compute.manager [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Took 48.71 seconds to build instance. 
[ 855.951779] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d4355b8f-2d09-4539-8c5c-91ebf3a335bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.019274] env[69927]: DEBUG nova.policy [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b66d74a85f3d4d31a4efce8a8df01cc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be278be46f3d48df818c834df17c663f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 856.177105] env[69927]: DEBUG nova.objects.base [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 856.222206] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095890, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.271624] env[69927]: DEBUG oslo_vmware.api [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095891, 'name': PowerOffVM_Task, 'duration_secs': 0.295343} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.271909] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 856.272566] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 856.272768] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2945781-c764-461a-91bb-b850871125a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.401487] env[69927]: DEBUG nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 856.410706] env[69927]: DEBUG oslo_concurrency.lockutils [None req-47ebfba2-25f9-485d-bd78-4bca2d523364 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.749s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.432496] env[69927]: DEBUG nova.network.neutron [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Successfully created port: 32049b49-d761-48ff-8938-d76ebe86f62e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.638458] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faff6e94-1070-459d-9b8e-6a55edb8c065 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.649244] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432e45d4-0263-4b39-9f78-2cdb2c7fd97b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.521069] env[69927]: DEBUG nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 857.534863] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef6538a-5231-4a9d-a99e-af8b753a2b11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.537920] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.538161] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 857.538432] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Deleting the datastore file [datastore1] 8de4160d-2282-4ed3-bdf0-349445a6eab8 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.540035] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8801dff8-6f41-4ec3-a2a0-07da8817d671 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.549597] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095890, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.543192} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.553469] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/6be47dcb-ce00-4b81-9e69-35acabac046e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.553820] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.554851] env[69927]: DEBUG oslo_vmware.api [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for the task: (returnval){ [ 857.554851] env[69927]: value = "task-4095893" [ 857.554851] env[69927]: _type = "Task" [ 857.554851] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.555211] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c60f920-99cc-441f-a5bd-46d37b6b53d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.558529] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c84b2c4-ec87-4d2e-845f-2217a4d312bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.586098] env[69927]: DEBUG nova.compute.provider_tree [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.591019] env[69927]: DEBUG oslo_vmware.api [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095893, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.591019] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 857.591019] env[69927]: value = "task-4095894" [ 857.591019] env[69927]: _type = "Task" [ 857.591019] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.602301] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.027548] env[69927]: DEBUG nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 858.039769] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.041530] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.041841] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.042092] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.042303] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.044244] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.044478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.044777] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.048132] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.052642] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.052642] env[69927]: DEBUG nova.network.neutron [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Successfully updated port: 32049b49-d761-48ff-8938-d76ebe86f62e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.054399] env[69927]: INFO nova.compute.manager [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Terminating instance [ 858.057258] env[69927]: INFO nova.compute.manager [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Terminating instance [ 858.064911] env[69927]: DEBUG nova.network.neutron [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating instance_info_cache with network_info: [{"id": "2d989e8c-d768-494a-a866-4da8ff809d05", "address": "fa:16:3e:02:b9:e7", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": 
"10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d989e8c-d7", "ovs_interfaceid": "2d989e8c-d768-494a-a866-4da8ff809d05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.074827] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:38:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6803b7fa-1500-4d6d-8f68-b7ab4453032d',id=32,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1704385790',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 858.075179] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.075386] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 858.075600] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.075901] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 858.075901] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 858.076180] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 858.076378] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 858.077706] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 858.077706] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 858.077706] env[69927]: DEBUG nova.virt.hardware [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 858.078758] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690ef545-b69d-4b91-8391-66e52a3d17af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.083497] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.088283] env[69927]: DEBUG oslo_vmware.api [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Task: {'id': task-4095893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227876} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.088881] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.089192] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.089442] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.089739] env[69927]: INFO nova.compute.manager [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Took 2.35 seconds to destroy the instance on the hypervisor. [ 858.090064] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 858.091116] env[69927]: DEBUG nova.scheduler.client.report [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.094839] env[69927]: DEBUG nova.compute.manager [-] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 858.094971] env[69927]: DEBUG nova.network.neutron [-] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.105116] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d34cc87-1eb1-4b80-9e05-54013b63d119 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.118096] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078916} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.128624] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.128624] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0541394b-65e4-4bdc-bff9-f9a8ed26f039 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.160044] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/6be47dcb-ce00-4b81-9e69-35acabac046e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.163781] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b21d6611-198e-4fe1-8dac-e7a1e5e68988 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.187547] env[69927]: DEBUG nova.compute.manager [req-d48d3163-0888-48e8-b01e-0e5055a9048f req-b4583421-ba15-4ae8-b375-bacef6b456cf service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Received event network-vif-plugged-32049b49-d761-48ff-8938-d76ebe86f62e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 858.187747] env[69927]: DEBUG oslo_concurrency.lockutils [req-d48d3163-0888-48e8-b01e-0e5055a9048f req-b4583421-ba15-4ae8-b375-bacef6b456cf service nova] Acquiring lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.187924] env[69927]: DEBUG oslo_concurrency.lockutils [req-d48d3163-0888-48e8-b01e-0e5055a9048f req-b4583421-ba15-4ae8-b375-bacef6b456cf service nova] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.189247] env[69927]: DEBUG oslo_concurrency.lockutils [req-d48d3163-0888-48e8-b01e-0e5055a9048f req-b4583421-ba15-4ae8-b375-bacef6b456cf service nova] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.189247] env[69927]: DEBUG nova.compute.manager [req-d48d3163-0888-48e8-b01e-0e5055a9048f req-b4583421-ba15-4ae8-b375-bacef6b456cf service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] No waiting events found dispatching network-vif-plugged-32049b49-d761-48ff-8938-d76ebe86f62e {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 858.189247] env[69927]: WARNING nova.compute.manager 
[req-d48d3163-0888-48e8-b01e-0e5055a9048f req-b4583421-ba15-4ae8-b375-bacef6b456cf service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Received unexpected event network-vif-plugged-32049b49-d761-48ff-8938-d76ebe86f62e for instance with vm_state building and task_state spawning. [ 858.195486] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 858.195486] env[69927]: value = "task-4095895" [ 858.195486] env[69927]: _type = "Task" [ 858.195486] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.206065] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095895, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.560424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.560424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.560424] env[69927]: DEBUG nova.network.neutron [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.567321] env[69927]: DEBUG nova.compute.manager [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 858.567606] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.568255] env[69927]: DEBUG nova.compute.manager [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 858.568452] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.571864] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df87fd3d-29fc-45af-93b8-40fab7e4b928 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.576151] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb8c47c-6f87-4ab3-81ce-2687f7ac221b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.580787] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "refresh_cache-f6972b90-7746-4a37-8be8-1739f96dc3dc" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.581747] env[69927]: DEBUG nova.network.neutron [-] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.589105] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.592510] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-154e1324-f5f7-40fd-bc7c-981c4d1a7e6a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.594810] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.595258] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-360cc6f0-0b76-4bdc-9c54-2385bbef3cdc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.600691] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.250s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.607118] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.220s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.607118] env[69927]: DEBUG nova.objects.instance [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lazy-loading 'resources' on Instance uuid 353ceb53-07e6-4e9b-bed5-ce9fca368b27 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.608244] env[69927]: DEBUG oslo_vmware.api [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 858.608244] env[69927]: value = "task-4095896" [ 858.608244] env[69927]: _type = "Task" [ 858.608244] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.608988] env[69927]: DEBUG oslo_vmware.api [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 858.608988] env[69927]: value = "task-4095897" [ 858.608988] env[69927]: _type = "Task" [ 858.608988] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.625504] env[69927]: DEBUG oslo_vmware.api [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.630114] env[69927]: DEBUG oslo_vmware.api [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.631538] env[69927]: INFO nova.scheduler.client.report [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Deleted allocations for instance 14359034-232d-478f-bf65-cf9937c59229 [ 858.709015] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095895, 'name': ReconfigVM_Task, 'duration_secs': 0.334388} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.709539] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/6be47dcb-ce00-4b81-9e69-35acabac046e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.710343] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01758c4d-3d5e-46a2-9f3b-0e9af559a3e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.719149] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 858.719149] env[69927]: value = "task-4095898" [ 858.719149] env[69927]: _type = "Task" [ 858.719149] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.728586] env[69927]: DEBUG nova.compute.manager [req-170c1e61-cc06-49e0-9956-f32781fc061e req-fc77b2ed-b551-4764-8fcb-4c6f40c4d042 service nova] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Received event network-vif-deleted-3da71ace-83d3-4b37-b02c-724f53d7f8bf {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 858.733970] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095898, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.086743] env[69927]: INFO nova.compute.manager [-] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Took 0.99 seconds to deallocate network for instance. [ 859.117372] env[69927]: DEBUG nova.network.neutron [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.135223] env[69927]: DEBUG oslo_vmware.api [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095897, 'name': PowerOffVM_Task, 'duration_secs': 0.330384} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.141158] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.141158] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.141733] env[69927]: DEBUG oslo_vmware.api [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095896, 'name': PowerOffVM_Task, 'duration_secs': 0.341959} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.142179] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b84c3127-a21f-46ff-9353-58dba1752723 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "14359034-232d-478f-bf65-cf9937c59229" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.674s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.143087] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79493be1-a4da-42de-ad14-c08864f114da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.145943] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.146647] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.149391] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86611a3a-8a45-4fb8-9429-90eff72a146b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.230566] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095898, 'name': Rename_Task, 'duration_secs': 0.221124} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.230896] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.231177] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4058e29-7583-4c9f-a996-dd4e0471e2e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.240155] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 859.240155] env[69927]: value = "task-4095901" [ 859.240155] env[69927]: _type = "Task" [ 859.240155] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.255145] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.265842] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.266127] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.266324] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Deleting the datastore file [datastore2] c7451ca3-f1fc-469b-b9d2-7fe24cb8949e {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.267539] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85a39ebb-88b3-4100-8b62-43f6665075a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.269829] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.270099] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Deleting contents of the VM from datastore datastore2 
{{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.270287] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Deleting the datastore file [datastore2] 7ce79e41-333a-4ef3-ba68-f74067d4ac5a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.270774] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1dc8da49-81ea-4786-9b1c-d61c9c0b524f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.279714] env[69927]: DEBUG oslo_vmware.api [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for the task: (returnval){ [ 859.279714] env[69927]: value = "task-4095902" [ 859.279714] env[69927]: _type = "Task" [ 859.279714] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.285785] env[69927]: DEBUG oslo_vmware.api [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for the task: (returnval){ [ 859.285785] env[69927]: value = "task-4095903" [ 859.285785] env[69927]: _type = "Task" [ 859.285785] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.295923] env[69927]: DEBUG oslo_vmware.api [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.308336] env[69927]: DEBUG oslo_vmware.api [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095903, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.434332] env[69927]: DEBUG nova.network.neutron [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance_info_cache with network_info: [{"id": "32049b49-d761-48ff-8938-d76ebe86f62e", "address": "fa:16:3e:64:c8:10", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32049b49-d7", "ovs_interfaceid": "32049b49-d761-48ff-8938-d76ebe86f62e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.590346] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.590953] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17a708a4-0292-4a00-9b7f-086f246a1fa8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.597379] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.599070] env[69927]: DEBUG oslo_vmware.api [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 859.599070] env[69927]: value = "task-4095904" [ 859.599070] env[69927]: _type = "Task" [ 859.599070] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.610937] env[69927]: DEBUG oslo_vmware.api [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095904, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.702283] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd8a812-11ff-4cb5-a552-6443153a94d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.711141] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7477d00-d9c3-42e3-a92e-3fbf35aaae66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.744176] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a568f2d3-c79e-40e4-b5a8-13b244d7ffb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.760344] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65fdaec6-913d-4b0b-88cc-4726724cca98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.765176] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095901, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.776590] env[69927]: DEBUG nova.compute.provider_tree [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.794401] env[69927]: DEBUG oslo_vmware.api [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Task: {'id': task-4095902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218669} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.795775] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.796052] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.796213] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.796400] env[69927]: INFO nova.compute.manager [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Took 1.23 seconds to destroy the instance on the hypervisor. [ 859.796652] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.797372] env[69927]: DEBUG nova.compute.manager [-] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 859.797455] env[69927]: DEBUG nova.network.neutron [-] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 859.802614] env[69927]: DEBUG oslo_vmware.api [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Task: {'id': task-4095903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219289} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.803269] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.803450] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.803613] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.803841] env[69927]: INFO nova.compute.manager [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Took 1.24 seconds to destroy the instance on the hypervisor. [ 859.804021] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.804228] env[69927]: DEBUG nova.compute.manager [-] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 859.804319] env[69927]: DEBUG nova.network.neutron [-] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 859.937597] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.937715] env[69927]: DEBUG nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Instance network_info: |[{"id": "32049b49-d761-48ff-8938-d76ebe86f62e", "address": "fa:16:3e:64:c8:10", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32049b49-d7", "ovs_interfaceid": "32049b49-d761-48ff-8938-d76ebe86f62e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 859.938130] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:c8:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32049b49-d761-48ff-8938-d76ebe86f62e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.947507] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.947778] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 859.948029] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e331367d-559a-4ec7-9b8e-a8acf2bae547 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.970476] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.970476] env[69927]: value = "task-4095905" [ 859.970476] env[69927]: _type = "Task" [ 859.970476] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.983616] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095905, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.986506] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.986943] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.989125] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.989125] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.989125] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.992133] env[69927]: INFO nova.compute.manager [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Terminating instance [ 860.116350] env[69927]: DEBUG oslo_vmware.api [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095904, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.257675] env[69927]: DEBUG oslo_vmware.api [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095901, 'name': PowerOnVM_Task, 'duration_secs': 0.635274} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.258057] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.258312] env[69927]: INFO nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Took 10.17 seconds to spawn the instance on the hypervisor. [ 860.258531] env[69927]: DEBUG nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 860.259519] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567f00b6-bcde-4979-b021-bb07985b7479 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.286512] env[69927]: DEBUG nova.scheduler.client.report [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.371103] env[69927]: DEBUG nova.compute.manager [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Received event network-changed-32049b49-d761-48ff-8938-d76ebe86f62e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 860.371300] env[69927]: DEBUG nova.compute.manager [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Refreshing instance network info cache due to event network-changed-32049b49-d761-48ff-8938-d76ebe86f62e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 860.371558] env[69927]: DEBUG oslo_concurrency.lockutils [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] Acquiring lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.371642] env[69927]: DEBUG oslo_concurrency.lockutils [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] Acquired lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.372583] env[69927]: DEBUG nova.network.neutron [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Refreshing network info cache for port 32049b49-d761-48ff-8938-d76ebe86f62e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 860.485076] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095905, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.497428] env[69927]: DEBUG nova.compute.manager [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 860.497428] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 860.498431] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19714b1d-5ef1-41a3-8723-ea66b5bebbba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.508057] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 860.508057] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ba5fb69-a052-42da-a00c-0eb1d014e51a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.516111] env[69927]: DEBUG oslo_vmware.api [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 860.516111] env[69927]: value = "task-4095906" [ 860.516111] env[69927]: _type = "Task" [ 860.516111] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.527651] env[69927]: DEBUG oslo_vmware.api [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095906, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.617525] env[69927]: DEBUG oslo_vmware.api [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4095904, 'name': PowerOnVM_Task, 'duration_secs': 0.551998} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.617906] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.618030] env[69927]: DEBUG nova.compute.manager [None req-38000e7e-d635-422a-a5ac-6a0f2bbf7089 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 860.618826] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35785df9-cc43-4239-afeb-c65dbce8f233 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.790816] env[69927]: INFO nova.compute.manager [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Took 49.99 seconds to build instance. 
[ 860.791796] env[69927]: DEBUG nova.network.neutron [-] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.794542] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.797165] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.225s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.799319] env[69927]: INFO nova.compute.claims [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.830920] env[69927]: INFO nova.scheduler.client.report [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleted allocations for instance 353ceb53-07e6-4e9b-bed5-ce9fca368b27 [ 860.881733] env[69927]: DEBUG nova.network.neutron [-] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.886331] env[69927]: DEBUG nova.compute.manager [req-de26ea84-e7cf-4b8e-81bb-95c983f4303f req-0cbe6dff-c261-43ba-8907-62b4fc15b2a7 service nova] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Received event network-vif-deleted-d61e5620-93c1-42e0-b372-c977dbc31d1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 860.982960] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095905, 'name': CreateVM_Task, 'duration_secs': 0.540631} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.983158] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.983847] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.983998] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.984334] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 860.984582] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72b0fe3f-8ef0-4fdb-892e-b1b6bfaff24e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.990262] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 860.990262] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52392eef-2c96-6f64-236d-cd6f428a7ed0" [ 860.990262] env[69927]: _type = "Task" [ 860.990262] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.999630] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52392eef-2c96-6f64-236d-cd6f428a7ed0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.029327] env[69927]: DEBUG oslo_vmware.api [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095906, 'name': PowerOffVM_Task, 'duration_secs': 0.240017} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.029470] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.029618] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.029877] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a2f4b7b-1018-4f52-b97c-eda3bf3222c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.107917] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 861.108490] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 861.108490] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Deleting the datastore file [datastore1] a536b069-45e0-4ffe-be53-ac33f8cb6ec0 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.108643] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-def17ce4-288b-43f6-8b5a-772265df74f2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.117062] env[69927]: DEBUG nova.network.neutron [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updated VIF entry in instance network info cache for port 32049b49-d761-48ff-8938-d76ebe86f62e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 861.117529] env[69927]: DEBUG nova.network.neutron [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance_info_cache with network_info: [{"id": "32049b49-d761-48ff-8938-d76ebe86f62e", "address": "fa:16:3e:64:c8:10", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32049b49-d7", "ovs_interfaceid": "32049b49-d761-48ff-8938-d76ebe86f62e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.120575] env[69927]: DEBUG oslo_vmware.api [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for the task: (returnval){ [ 861.120575] env[69927]: value = "task-4095908" [ 861.120575] env[69927]: _type = "Task" [ 861.120575] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.134579] env[69927]: DEBUG oslo_vmware.api [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.298071] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9d42ef4-a631-4537-84e8-fa7838b78460 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.813s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.298716] env[69927]: INFO nova.compute.manager [-] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Took 1.50 seconds to deallocate network for instance. 
[ 861.344262] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dfa4e4a6-ed70-41a2-aba8-7468a56f257d tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "353ceb53-07e6-4e9b-bed5-ce9fca368b27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.939s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.391203] env[69927]: INFO nova.compute.manager [-] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Took 1.58 seconds to deallocate network for instance. [ 861.505412] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52392eef-2c96-6f64-236d-cd6f428a7ed0, 'name': SearchDatastore_Task, 'duration_secs': 0.011452} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.506037] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.506126] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.506728] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.506728] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.506728] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.507222] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c5e78ac-d31a-4ca4-9d24-5200d6003ec7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.518618] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 
tempest-MigrationsAdminTest-685805879-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.519297] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.520159] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-919b6a83-cc36-4053-a158-382652fb3f8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.530432] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 861.530432] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b12ea0-1678-60b0-4859-ae6d259059cc" [ 861.530432] env[69927]: _type = "Task" [ 861.530432] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.545561] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b12ea0-1678-60b0-4859-ae6d259059cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.623009] env[69927]: DEBUG oslo_concurrency.lockutils [req-dde49f05-15c9-4ce6-a5aa-152dc4817355 req-a1f7c3c6-1381-4aad-a119-6cf1ea79abc2 service nova] Releasing lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.633568] env[69927]: DEBUG oslo_vmware.api [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Task: {'id': task-4095908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183725} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.633841] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 861.634036] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 861.634214] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 861.634385] env[69927]: INFO nova.compute.manager [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 861.634632] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 861.634837] env[69927]: DEBUG nova.compute.manager [-] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 861.634929] env[69927]: DEBUG nova.network.neutron [-] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 861.803788] env[69927]: DEBUG nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 861.815015] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.891133] env[69927]: INFO nova.compute.manager [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Rescuing [ 861.891524] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.891599] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.891709] env[69927]: DEBUG nova.network.neutron [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 861.899648] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.057575] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b12ea0-1678-60b0-4859-ae6d259059cc, 'name': SearchDatastore_Task, 'duration_secs': 0.027468} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.064430] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4589b4be-021e-4e24-a2c1-8c099bc36762 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.072936] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 862.072936] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a0a739-2a19-17ab-c24a-96bdcbb730b4" [ 862.072936] env[69927]: _type = "Task" [ 862.072936] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.093829] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a0a739-2a19-17ab-c24a-96bdcbb730b4, 'name': SearchDatastore_Task, 'duration_secs': 0.013982} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.094295] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.094689] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.099165] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aaaef190-e677-4557-aa4d-7f525230bf02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.107901] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 862.107901] env[69927]: value = "task-4095909" [ 862.107901] env[69927]: _type = "Task" [ 862.107901] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.118322] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095909, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.330942] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.412627] env[69927]: DEBUG nova.network.neutron [-] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.444410] env[69927]: DEBUG nova.compute.manager [req-c1357160-0a02-42d7-8f51-eea2fac59ebf req-45aa9e50-42a0-41a4-9ded-af763b8bd6b3 service nova] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Received event network-vif-deleted-b112a351-376b-4433-94a9-e8e186f3dff3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 862.444638] env[69927]: DEBUG nova.compute.manager [req-c1357160-0a02-42d7-8f51-eea2fac59ebf req-45aa9e50-42a0-41a4-9ded-af763b8bd6b3 service nova] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Received event network-vif-deleted-fbaaf58d-7762-4361-af98-ac03d24f2a05 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 862.474219] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eeccbf5-e7aa-48d3-ba1b-faf729d48564 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.485538] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faa5eb1-c090-4779-81cb-f4e39f7b5e45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.528492] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58941a8a-aa90-4914-ae99-8415c06f60fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.537735] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb16467-6f5a-453b-92cf-e01006e54aad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.557050] env[69927]: DEBUG nova.compute.provider_tree [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.622297] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095909, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.717355] env[69927]: DEBUG nova.network.neutron [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Updating instance_info_cache with network_info: [{"id": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "address": "fa:16:3e:06:ea:cd", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1455ee-4a", "ovs_interfaceid": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.917751] env[69927]: INFO nova.compute.manager [-] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Took 1.28 seconds to deallocate network for instance. [ 863.065255] env[69927]: DEBUG nova.scheduler.client.report [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.124784] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095909, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580059} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.125359] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.125716] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.126115] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6c33489-20e5-46d7-9051-e2c524fdfe49 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.135688] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 863.135688] env[69927]: value = "task-4095910" [ 863.135688] env[69927]: _type = "Task" [ 863.135688] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.147700] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095910, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.221051] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.428244] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.570731] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.773s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.571397] env[69927]: DEBUG nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 863.574299] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.019s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.575797] env[69927]: INFO nova.compute.claims [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.647219] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095910, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076409} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.647219] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.647582] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1988b0fe-9703-4b68-958d-b09df1309a9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.671406] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.671710] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0a878c9-89ef-4513-9953-f181e9095db0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.692621] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 863.692621] env[69927]: value = "task-4095911" [ 863.692621] env[69927]: _type = "Task" [ 863.692621] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.701641] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095911, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.080683] env[69927]: DEBUG nova.compute.utils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 864.085374] env[69927]: DEBUG nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.091504] env[69927]: DEBUG nova.network.neutron [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.112749] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.113354] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.149894] env[69927]: DEBUG nova.policy [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd89d0a2232b4da1a0b88799062fe8da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3afde63c8cbe4aecb32a470fd6b948f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 864.206129] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095911, 'name': ReconfigVM_Task, 'duration_secs': 0.306396} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.206129] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfigured VM instance instance-0000002f to attach disk [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.206793] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4aff2e9e-c8b6-49b3-add8-00ec2371e2b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.216292] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 864.216292] env[69927]: value = "task-4095912" [ 864.216292] env[69927]: _type = "Task" [ 864.216292] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.227008] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095912, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.480506] env[69927]: DEBUG nova.network.neutron [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Successfully created port: 0a3c7cb5-f42d-407f-8561-e9c2695bced8 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.586823] env[69927]: DEBUG nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 864.727230] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095912, 'name': Rename_Task, 'duration_secs': 0.15736} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.730010] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 864.730699] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-267fd45a-117c-4aeb-b6ff-1114ebcb5670 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.738380] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 864.738380] env[69927]: value = "task-4095913" [ 864.738380] env[69927]: _type = "Task" [ 864.738380] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.749751] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.758695] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.759101] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f36d3a1b-287e-4cfc-8225-d3ea7ebefd46 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.769849] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 864.769849] env[69927]: value = "task-4095914" [ 864.769849] env[69927]: _type = "Task" [ 864.769849] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.779706] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095914, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.159606] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973b432b-df05-451c-8803-c324ebbea10d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.168725] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb58d95-d079-422f-bfe3-7d8176942891 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.204029] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970777b1-bb97-4660-b543-8cafa74dfc03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.213206] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fd59cf-2fec-4042-b7dc-ca7b993ef43e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.230226] env[69927]: DEBUG nova.compute.provider_tree [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.252719] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095913, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.279982] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095914, 'name': PowerOffVM_Task, 'duration_secs': 0.323119} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.280303] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 865.281169] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34d43f5-267a-4927-b7ea-4347cb2dbb38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.302953] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03bc55d4-aaa5-4193-8050-3ff14e07240e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.337914] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 865.338258] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-729a2a3f-6903-4210-be99-88c402d7c283 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.346228] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 865.346228] env[69927]: value = "task-4095915" [ 865.346228] env[69927]: _type = "Task" [ 865.346228] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.357012] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 865.357347] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 865.357627] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.357724] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.357881] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 865.358157] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62056a50-e252-4e67-aabc-419523b35a6f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.371111] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 865.371111] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 865.371501] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-009982ed-3dd6-4a14-a5c7-f5ccfc72d48d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.378112] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 865.378112] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f77ee5-a0e4-9f2d-d35b-8b51d631e26d" [ 865.378112] env[69927]: _type = "Task" [ 865.378112] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.388660] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f77ee5-a0e4-9f2d-d35b-8b51d631e26d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.605221] env[69927]: DEBUG nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.634291] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.634656] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.634745] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.634962] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 
tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.635265] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.635500] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.635692] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.635855] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.636032] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.636210] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.636392] env[69927]: DEBUG nova.virt.hardware [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.637321] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aab391d-a66c-4cd6-bf21-7576c133151f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.646203] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630d3867-d755-4780-9b74-9054929f12a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.734231] env[69927]: DEBUG nova.scheduler.client.report [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 865.752131] env[69927]: DEBUG oslo_vmware.api [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4095913, 'name': PowerOnVM_Task, 'duration_secs': 0.528258} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.754037] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 865.754309] env[69927]: INFO nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Took 7.73 seconds to spawn the instance on the hypervisor. [ 865.754502] env[69927]: DEBUG nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 865.755694] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76cceff-228a-495c-8757-1bd9fb54d9c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.891158] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f77ee5-a0e4-9f2d-d35b-8b51d631e26d, 'name': SearchDatastore_Task, 'duration_secs': 0.01042} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.891726] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-110a4d17-e8ea-40a2-b567-aa9461c39e41 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.898822] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 865.898822] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238682f-002f-7fa2-592b-5e7992123589" [ 865.898822] env[69927]: _type = "Task" [ 865.898822] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.908017] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238682f-002f-7fa2-592b-5e7992123589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.929718] env[69927]: DEBUG nova.compute.manager [req-cee2baf3-7fd3-4869-be48-a09512fbfdd9 req-ad8a00e4-9ba3-4bbe-85d2-4053a06f50aa service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Received event network-vif-plugged-0a3c7cb5-f42d-407f-8561-e9c2695bced8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 865.929718] env[69927]: DEBUG oslo_concurrency.lockutils [req-cee2baf3-7fd3-4869-be48-a09512fbfdd9 req-ad8a00e4-9ba3-4bbe-85d2-4053a06f50aa service nova] Acquiring lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.929963] env[69927]: DEBUG oslo_concurrency.lockutils [req-cee2baf3-7fd3-4869-be48-a09512fbfdd9 req-ad8a00e4-9ba3-4bbe-85d2-4053a06f50aa service nova] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.930032] env[69927]: DEBUG oslo_concurrency.lockutils [req-cee2baf3-7fd3-4869-be48-a09512fbfdd9 req-ad8a00e4-9ba3-4bbe-85d2-4053a06f50aa service nova] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.930170] env[69927]: DEBUG nova.compute.manager [req-cee2baf3-7fd3-4869-be48-a09512fbfdd9 req-ad8a00e4-9ba3-4bbe-85d2-4053a06f50aa service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] No waiting events found dispatching network-vif-plugged-0a3c7cb5-f42d-407f-8561-e9c2695bced8 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 865.930341] env[69927]: WARNING nova.compute.manager [req-cee2baf3-7fd3-4869-be48-a09512fbfdd9 req-ad8a00e4-9ba3-4bbe-85d2-4053a06f50aa service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Received unexpected event network-vif-plugged-0a3c7cb5-f42d-407f-8561-e9c2695bced8 for instance with vm_state building and task_state spawning. 
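The recurring "Acquiring lock ... by ...", "Lock ... acquired ... waited N s" and "Lock ... "released" ... held N s" DEBUG lines above (lockutils.py:405/410/424) are emitted by oslo.concurrency while Nova serializes work such as _locked_do_build_and_run_instance and the per-instance "<uuid>-events" bookkeeping. A minimal, illustrative Python sketch of that locking pattern follows; the function names and the pending_events dict are hypothetical stand-ins, not Nova's actual implementation.

    from oslo_concurrency import lockutils

    def pop_instance_event_sketch(instance_uuid, pending_events):
        # lockutils.lock() is a context manager; with DEBUG logging enabled it
        # produces the "Acquiring lock ..." / "acquired ... waited" /
        # ""released" ... held" messages with the measured wait and hold times.
        with lockutils.lock('%s-events' % instance_uuid):
            # Critical section: pop any recorded event for this instance.
            return pending_events.pop(instance_uuid, None)

    # Decorator form: serialize an entire function on a named lock, comparable
    # to the "compute_resources" lock held around resource-tracker claims.
    @lockutils.synchronized('compute_resources')
    def instance_claim_sketch():
        pass

Both forms log the same acquire/wait/hold timings, which is what makes the waited/held durations useful for spotting lock contention (for example, the 32.989 s wait on "compute_resources" in the entries that follow).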
[ 866.038353] env[69927]: DEBUG nova.network.neutron [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Successfully updated port: 0a3c7cb5-f42d-407f-8561-e9c2695bced8 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.239648] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.240228] env[69927]: DEBUG nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 866.243145] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.989s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.245036] env[69927]: INFO nova.compute.claims [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.279284] env[69927]: INFO nova.compute.manager [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Took 52.06 seconds to build instance. [ 866.412039] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238682f-002f-7fa2-592b-5e7992123589, 'name': SearchDatastore_Task, 'duration_secs': 0.011085} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.412039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.412039] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. {{(pid=69927) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 866.412039] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de94aa82-784d-4661-83cd-2e9e4c62cc9d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.419907] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 866.419907] env[69927]: value = "task-4095916" [ 866.419907] env[69927]: _type = "Task" [ 866.419907] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.430290] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095916, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.540766] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-66ba8086-2dd4-4d02-aac3-1bbb4a404784" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.540870] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-66ba8086-2dd4-4d02-aac3-1bbb4a404784" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.542374] env[69927]: DEBUG nova.network.neutron [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.549690] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dc702a-089c-69f0-88b2-d4991f199d18/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 866.550735] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcdce27-9f90-46a0-b144-9830a20aca76 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.561089] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dc702a-089c-69f0-88b2-d4991f199d18/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 866.561369] env[69927]: ERROR oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dc702a-089c-69f0-88b2-d4991f199d18/disk-0.vmdk due to incomplete transfer. [ 866.561651] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-99732838-5438-4b63-9add-717438b9bfa2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.572164] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dc702a-089c-69f0-88b2-d4991f199d18/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 866.572244] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Uploaded image 3400d513-87f1-466e-b71f-d465ac6ba592 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 866.574675] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 866.576343] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-48765b25-16fe-4fe3-8fab-c24c50ee908a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.582955] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 866.582955] env[69927]: value = "task-4095917" [ 866.582955] env[69927]: _type = "Task" [ 866.582955] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.597053] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095917, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.752586] env[69927]: DEBUG nova.compute.utils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 866.754484] env[69927]: DEBUG nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 866.754660] env[69927]: DEBUG nova.network.neutron [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 866.781424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bcde0ca9-2ec6-429f-bfec-0be2d3d08d8f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.431s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.815160] env[69927]: DEBUG nova.policy [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de8b1b11969a4feb818dc682d2fec552', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61b1aea0ccf049c8942ba32932412497', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 866.933145] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095916, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.094555] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095917, 'name': Destroy_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.122810] env[69927]: DEBUG nova.network.neutron [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.153892] env[69927]: DEBUG nova.network.neutron [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Successfully created port: b590e270-89a9-47f6-8e4e-69b428b381a8 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.258038] env[69927]: DEBUG nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 867.284959] env[69927]: DEBUG nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 867.358092] env[69927]: DEBUG nova.network.neutron [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Updating instance_info_cache with network_info: [{"id": "0a3c7cb5-f42d-407f-8561-e9c2695bced8", "address": "fa:16:3e:46:1f:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a3c7cb5-f4", "ovs_interfaceid": "0a3c7cb5-f42d-407f-8561-e9c2695bced8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.433820] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095916, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597898} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.434155] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. 
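Entries such as "Waiting for the task: (returnval){ value = "task-..." }", "progress is N%" and "'duration_secs': ... completed successfully" come from oslo.vmware's task poller (wait_for_task/_poll_task in api.py) wrapping long-running vCenter operations like CopyVirtualDisk_Task, PowerOnVM_Task and SearchDatastore_Task. The sketch below shows that call pattern under the assumption of oslo.vmware's public session API; the host, credentials, poll interval and the invoked method's arguments are placeholders, not values taken from this log.

    from oslo_vmware import api as vmware_api

    def make_session_sketch():
        # Placeholder connection details: task_poll_interval sets the cadence
        # of the "progress is N%" polls; api_retry_count bounds retries on
        # transient API failures.
        return vmware_api.VMwareAPISession(
            'vcenter.example.org', 'user', 'secret',
            api_retry_count=10, task_poll_interval=0.5)

    def power_on_vm_sketch(session, vm_ref):
        """Start a VM and block until vCenter reports the task finished."""
        # invoke_api() issues the SOAP call (here PowerOnVM_Task) and returns
        # a task reference; CopyVirtualDisk_Task, Rename_Task, etc. follow the
        # same shape with their own arguments.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls until the task succeeds (returning its task
        # info, whose result carries the "returnval") or raises on error.
        return session.wait_for_task(task)

A caller would pair these as result = power_on_vm_sketch(make_session_sketch(), vm_ref), with vm_ref being a managed-object reference obtained from a prior lookup; the sketch only illustrates the polling contract, not the driver's exact call sites.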
[ 867.434948] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c508d9aa-4aa8-4470-ac11-e04c18babde3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.472221] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.472221] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6567baba-d28a-480b-801b-402044c8bf4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.491958] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 867.491958] env[69927]: value = "task-4095918" [ 867.491958] env[69927]: _type = "Task" [ 867.491958] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.503702] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095918, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.597983] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095917, 'name': Destroy_Task, 'duration_secs': 0.540525} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.600023] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Destroyed the VM [ 867.600023] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 867.600023] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-752a341c-5b34-429c-ac98-cfc3ea715b96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.610201] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 867.610201] env[69927]: value = "task-4095919" [ 867.610201] env[69927]: _type = "Task" [ 867.610201] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.630612] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095919, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.810736] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.861898] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-66ba8086-2dd4-4d02-aac3-1bbb4a404784" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.862055] env[69927]: DEBUG nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Instance network_info: |[{"id": "0a3c7cb5-f42d-407f-8561-e9c2695bced8", "address": "fa:16:3e:46:1f:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a3c7cb5-f4", "ovs_interfaceid": "0a3c7cb5-f42d-407f-8561-e9c2695bced8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 867.862464] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:1f:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a3c7cb5-f42d-407f-8561-e9c2695bced8', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.870634] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.872527] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.873325] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005c1ff5-37c7-4422-98d9-b27761028060 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.876775] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2aec6952-9040-41dd-8197-9e04b9945fa4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.898697] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cab3123-f9cb-41e5-b0c9-999de76c3d73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.904286] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.904286] env[69927]: value = "task-4095920" [ 867.904286] env[69927]: _type = "Task" [ 867.904286] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.938915] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea768ac3-0726-46ce-ab68-5b3769f5d959 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.941836] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095920, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.947583] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c0828c-036c-4779-aecc-d263f79d223d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.963535] env[69927]: DEBUG nova.compute.provider_tree [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.003814] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095918, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.064109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "80fc9add-683b-424e-9876-cdcae664e2da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.064630] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "80fc9add-683b-424e-9876-cdcae664e2da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.064978] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "80fc9add-683b-424e-9876-cdcae664e2da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.065340] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "80fc9add-683b-424e-9876-cdcae664e2da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.065551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "80fc9add-683b-424e-9876-cdcae664e2da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.069850] env[69927]: INFO nova.compute.manager [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Terminating instance [ 868.128922] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095919, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.253939] env[69927]: DEBUG nova.compute.manager [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Received event network-changed-0a3c7cb5-f42d-407f-8561-e9c2695bced8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.253939] env[69927]: DEBUG nova.compute.manager [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Refreshing instance network info cache due to event network-changed-0a3c7cb5-f42d-407f-8561-e9c2695bced8. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 868.254310] env[69927]: DEBUG oslo_concurrency.lockutils [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] Acquiring lock "refresh_cache-66ba8086-2dd4-4d02-aac3-1bbb4a404784" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.254971] env[69927]: DEBUG oslo_concurrency.lockutils [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] Acquired lock "refresh_cache-66ba8086-2dd4-4d02-aac3-1bbb4a404784" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.254971] env[69927]: DEBUG nova.network.neutron [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Refreshing network info cache for port 0a3c7cb5-f42d-407f-8561-e9c2695bced8 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.276742] env[69927]: DEBUG nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 868.314920] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 868.315434] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.315704] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 868.315980] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.316248] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 868.316547] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 868.316967] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 868.317512] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 868.317615] env[69927]: DEBUG 
nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 868.317904] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 868.318218] env[69927]: DEBUG nova.virt.hardware [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 868.320204] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afc66d5-3da6-4a8c-bfb0-090a72b7235a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.334897] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85deb71c-ba3f-4701-b5f3-4d9b1f207d50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.417974] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095920, 'name': CreateVM_Task, 'duration_secs': 0.452857} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.418191] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.418940] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.419959] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.419959] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.419959] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f11051a-58ee-401b-bc91-6338a46c467b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.425108] env[69927]: DEBUG oslo_vmware.api 
[None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 868.425108] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5261dad1-9cdd-f71f-a8bb-6cf64a375a05" [ 868.425108] env[69927]: _type = "Task" [ 868.425108] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.435295] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5261dad1-9cdd-f71f-a8bb-6cf64a375a05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.467802] env[69927]: DEBUG nova.scheduler.client.report [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 868.503405] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095918, 'name': ReconfigVM_Task, 'duration_secs': 1.011019} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.504043] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.504494] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa878a77-2e63-4023-ab0a-98d47b69d679 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.532906] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be840113-46b8-49d8-89c5-9aa25d3f53f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.549768] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 868.549768] env[69927]: value = "task-4095921" [ 868.549768] env[69927]: _type = "Task" [ 868.549768] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.559654] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095921, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.575165] env[69927]: DEBUG nova.compute.manager [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 868.575429] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.576390] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a4f06d-73a9-4f38-ba97-4e9b30797e20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.584789] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.585072] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efb6d7ad-1e0f-448f-83c0-967969df873b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.624320] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095919, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.729252] env[69927]: DEBUG nova.network.neutron [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Successfully updated port: b590e270-89a9-47f6-8e4e-69b428b381a8 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.774695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.775116] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.811165] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.811560] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.811848] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleting the datastore file [datastore1] 80fc9add-683b-424e-9876-cdcae664e2da {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.812206] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df1f2c99-5d3b-46b8-b8a3-b6bc147c0025 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.821062] env[69927]: DEBUG oslo_vmware.api [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 868.821062] env[69927]: value = "task-4095923" [ 868.821062] env[69927]: _type = "Task" [ 868.821062] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.834468] env[69927]: DEBUG oslo_vmware.api [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095923, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.838181] env[69927]: DEBUG nova.compute.manager [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 868.938792] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5261dad1-9cdd-f71f-a8bb-6cf64a375a05, 'name': SearchDatastore_Task, 'duration_secs': 0.011053} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.938792] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.938792] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.938792] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.939109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.939109] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.939109] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46e9b8bb-0bbf-4a0b-92e7-b4d003a914c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.950160] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
868.950160] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.950717] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d22d4903-a157-4386-8491-12d66081b6ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.956892] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 868.956892] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5293802a-d0c7-1fe4-911c-0b1325667c6f" [ 868.956892] env[69927]: _type = "Task" [ 868.956892] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.969026] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5293802a-d0c7-1fe4-911c-0b1325667c6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.974069] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.731s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.974652] env[69927]: DEBUG nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 868.977647] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.701s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.977923] env[69927]: DEBUG nova.objects.instance [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lazy-loading 'resources' on Instance uuid a4249857-6f60-4040-b676-d2d19dc83f15 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 869.006063] env[69927]: DEBUG nova.network.neutron [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Updated VIF entry in instance network info cache for port 0a3c7cb5-f42d-407f-8561-e9c2695bced8. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.006825] env[69927]: DEBUG nova.network.neutron [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Updating instance_info_cache with network_info: [{"id": "0a3c7cb5-f42d-407f-8561-e9c2695bced8", "address": "fa:16:3e:46:1f:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a3c7cb5-f4", "ovs_interfaceid": "0a3c7cb5-f42d-407f-8561-e9c2695bced8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.060899] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095921, 'name': ReconfigVM_Task, 'duration_secs': 0.196729} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.061222] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.061488] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-535478b3-28d3-46b4-9f3e-68a445d23d73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.068934] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 869.068934] env[69927]: value = "task-4095924" [ 869.068934] env[69927]: _type = "Task" [ 869.068934] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.078825] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095924, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.122285] env[69927]: DEBUG oslo_vmware.api [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095919, 'name': RemoveSnapshot_Task, 'duration_secs': 1.042752} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.123763] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 869.123763] env[69927]: INFO nova.compute.manager [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Took 18.01 seconds to snapshot the instance on the hypervisor. [ 869.232187] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-b67630a4-2c1a-440b-af82-80c908ffa6e9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.232381] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-b67630a4-2c1a-440b-af82-80c908ffa6e9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.232528] env[69927]: DEBUG nova.network.neutron [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.330879] env[69927]: DEBUG oslo_vmware.api [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4095923, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178047} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.331161] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.331271] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.331457] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.331631] env[69927]: INFO nova.compute.manager [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Took 0.76 seconds to destroy the instance on the hypervisor. [ 869.331929] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.332174] env[69927]: DEBUG nova.compute.manager [-] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 869.332437] env[69927]: DEBUG nova.network.neutron [-] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.369567] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.474334] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5293802a-d0c7-1fe4-911c-0b1325667c6f, 'name': SearchDatastore_Task, 'duration_secs': 0.010184} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.474334] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e3d5305-e959-4820-8bdd-5cbb3e9c6989 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.483487] env[69927]: DEBUG nova.compute.utils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 869.485838] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 869.485838] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524eed56-1cc9-ec20-2e6c-ff1f781020c8" [ 869.485838] env[69927]: _type = "Task" [ 869.485838] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.490316] env[69927]: DEBUG nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 869.490316] env[69927]: DEBUG nova.network.neutron [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.502693] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524eed56-1cc9-ec20-2e6c-ff1f781020c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010886} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.503219] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.503625] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 66ba8086-2dd4-4d02-aac3-1bbb4a404784/66ba8086-2dd4-4d02-aac3-1bbb4a404784.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.504133] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1614da9e-4f91-4959-8679-634b64630e6f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.509807] env[69927]: DEBUG oslo_concurrency.lockutils [req-042c8dc3-4152-412f-8b6b-6f6f81b4f34e req-92f1f6cb-d268-45e1-a764-0f4a4c8d5cf1 service nova] Releasing lock "refresh_cache-66ba8086-2dd4-4d02-aac3-1bbb4a404784" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.516020] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 869.516020] env[69927]: value = "task-4095925" [ 869.516020] env[69927]: _type = "Task" [ 869.516020] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.527788] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.582172] env[69927]: DEBUG nova.policy [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '948bb5c1ffc54cecb2d4ae1e5c98c11c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b7ae5270b0643e6b5720d4f2f765d74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 869.586589] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095924, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.626701] env[69927]: DEBUG nova.compute.manager [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Instance disappeared during snapshot {{(pid=69927) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 869.646238] env[69927]: DEBUG nova.compute.manager [None req-7e47ce7b-a664-4fd9-b692-00c82e6e1df5 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image not found during clean up 3400d513-87f1-466e-b71f-d465ac6ba592 {{(pid=69927) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 869.794027] env[69927]: DEBUG nova.network.neutron [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.986931] env[69927]: DEBUG nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 870.010012] env[69927]: DEBUG nova.network.neutron [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Updating instance_info_cache with network_info: [{"id": "b590e270-89a9-47f6-8e4e-69b428b381a8", "address": "fa:16:3e:17:29:c4", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb590e270-89", "ovs_interfaceid": "b590e270-89a9-47f6-8e4e-69b428b381a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.032667] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095925, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.087506] env[69927]: DEBUG oslo_vmware.api [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095924, 'name': PowerOnVM_Task, 'duration_secs': 0.748201} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.087933] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.092082] env[69927]: DEBUG nova.compute.manager [None req-72944a71-4e5c-4c09-ac8c-6c4c3d9746dd tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 870.092933] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c1f2c5-c975-4674-a3e8-65337c913703 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.165925] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d625c3f6-a623-489f-a938-0761c6568b1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.174927] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85070811-24f8-46be-a14a-2594a42ab891 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.210091] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa4d4f0-90d8-4fe5-9fbb-3fef16b33ad9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.218703] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0789c7df-a67a-43fa-a5d2-9fb43ebdb8b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.236201] env[69927]: DEBUG nova.compute.provider_tree [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.252279] env[69927]: DEBUG nova.network.neutron [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Successfully created 
port: 699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 870.285027] env[69927]: DEBUG nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Received event network-vif-plugged-b590e270-89a9-47f6-8e4e-69b428b381a8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.285027] env[69927]: DEBUG oslo_concurrency.lockutils [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] Acquiring lock "b67630a4-2c1a-440b-af82-80c908ffa6e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.285027] env[69927]: DEBUG oslo_concurrency.lockutils [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.285713] env[69927]: DEBUG oslo_concurrency.lockutils [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.286037] env[69927]: DEBUG nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] No waiting events found dispatching network-vif-plugged-b590e270-89a9-47f6-8e4e-69b428b381a8 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 870.286275] env[69927]: WARNING nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Received unexpected event network-vif-plugged-b590e270-89a9-47f6-8e4e-69b428b381a8 for instance with vm_state building and task_state spawning. [ 870.286509] env[69927]: DEBUG nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Received event network-changed-b590e270-89a9-47f6-8e4e-69b428b381a8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.286661] env[69927]: DEBUG nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Refreshing instance network info cache due to event network-changed-b590e270-89a9-47f6-8e4e-69b428b381a8. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 870.287245] env[69927]: DEBUG oslo_concurrency.lockutils [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] Acquiring lock "refresh_cache-b67630a4-2c1a-440b-af82-80c908ffa6e9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.444804] env[69927]: DEBUG nova.network.neutron [-] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.515871] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-b67630a4-2c1a-440b-af82-80c908ffa6e9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.516316] env[69927]: DEBUG nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Instance network_info: |[{"id": "b590e270-89a9-47f6-8e4e-69b428b381a8", "address": "fa:16:3e:17:29:c4", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb590e270-89", "ovs_interfaceid": "b590e270-89a9-47f6-8e4e-69b428b381a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 870.516766] env[69927]: DEBUG oslo_concurrency.lockutils [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] Acquired lock "refresh_cache-b67630a4-2c1a-440b-af82-80c908ffa6e9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.517035] env[69927]: DEBUG nova.network.neutron [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Refreshing network info cache for port b590e270-89a9-47f6-8e4e-69b428b381a8 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.518377] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:29:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b590e270-89a9-47f6-8e4e-69b428b381a8', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.530375] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.535212] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.539333] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c39abe07-330f-4b28-b0c9-46d69c115bc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.560614] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547397} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.562330] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 66ba8086-2dd4-4d02-aac3-1bbb4a404784/66ba8086-2dd4-4d02-aac3-1bbb4a404784.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.562625] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.562900] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.562900] env[69927]: value = "task-4095926" [ 870.562900] env[69927]: _type = "Task" [ 870.562900] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.563227] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b3cdb4e-6dd2-4c6b-9faa-6b08531d9c57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.576379] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095926, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.578401] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 870.578401] env[69927]: value = "task-4095927" [ 870.578401] env[69927]: _type = "Task" [ 870.578401] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.588405] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.759173] env[69927]: ERROR nova.scheduler.client.report [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] [req-afde40e1-9dba-4179-ad4b-1d58140d6b0c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-afde40e1-9dba-4179-ad4b-1d58140d6b0c"}]} [ 870.777257] env[69927]: DEBUG nova.scheduler.client.report [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 870.795687] env[69927]: DEBUG nova.scheduler.client.report [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 870.795687] env[69927]: DEBUG nova.compute.provider_tree [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.810910] env[69927]: DEBUG nova.scheduler.client.report [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 870.831331] env[69927]: DEBUG nova.scheduler.client.report [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 870.952677] env[69927]: INFO nova.compute.manager [-] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Took 1.62 seconds to deallocate network for instance. [ 871.001636] env[69927]: DEBUG nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 871.067568] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 871.067864] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.068016] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 871.068206] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 
tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.068352] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 871.068499] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 871.068708] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 871.068866] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 871.069045] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 871.069214] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 871.069405] env[69927]: DEBUG nova.virt.hardware [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 871.070739] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da948bbd-013c-4f02-81fe-e4cc9ceb23e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.091871] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420c64ed-29bd-446a-9e66-97e2bb2a18a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.096025] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095926, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.102059] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156121} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.102881] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.103735] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbefb63f-2c7b-46bf-b576-7b2a3b61b7ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.140911] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 66ba8086-2dd4-4d02-aac3-1bbb4a404784/66ba8086-2dd4-4d02-aac3-1bbb4a404784.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.144417] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8308894c-ba78-4108-932a-88a1560b1b64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.170560] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 871.170560] env[69927]: value = "task-4095928" [ 871.170560] env[69927]: _type = "Task" [ 871.170560] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.182578] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095928, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.338051] env[69927]: DEBUG nova.network.neutron [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Updated VIF entry in instance network info cache for port b590e270-89a9-47f6-8e4e-69b428b381a8. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.338440] env[69927]: DEBUG nova.network.neutron [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Updating instance_info_cache with network_info: [{"id": "b590e270-89a9-47f6-8e4e-69b428b381a8", "address": "fa:16:3e:17:29:c4", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb590e270-89", "ovs_interfaceid": "b590e270-89a9-47f6-8e4e-69b428b381a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.460034] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127089a4-146f-4797-9bf2-acf57b59f4fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.463104] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.470298] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9a6dfe-719c-4bc4-be7a-9bd568f7e228 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.512810] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c7cded-537a-43a6-a019-e0a921e1e744 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.526667] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80923656-1920-4ee8-b99c-b652b9421f31 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.544752] env[69927]: DEBUG nova.compute.provider_tree [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.580855] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095926, 'name': CreateVM_Task, 'duration_secs': 0.846096} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.581040] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.581885] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.582101] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.582479] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.582806] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-733012ae-a392-4196-9818-4239dc3a0255 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.588327] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 871.588327] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d079b8-9562-9d96-5d8e-8b98a7bca302" [ 871.588327] env[69927]: _type = "Task" [ 871.588327] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.597361] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d079b8-9562-9d96-5d8e-8b98a7bca302, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.681848] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095928, 'name': ReconfigVM_Task, 'duration_secs': 0.382262} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.682159] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 66ba8086-2dd4-4d02-aac3-1bbb4a404784/66ba8086-2dd4-4d02-aac3-1bbb4a404784.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.682836] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2d1fdc2-40f6-4df4-b39b-6dbbdd0382ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.691134] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 871.691134] env[69927]: value = "task-4095929" [ 871.691134] env[69927]: _type = "Task" [ 871.691134] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.700324] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095929, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.842696] env[69927]: DEBUG oslo_concurrency.lockutils [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] Releasing lock "refresh_cache-b67630a4-2c1a-440b-af82-80c908ffa6e9" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.842889] env[69927]: DEBUG nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Received event network-vif-deleted-d0780c40-8a68-4d93-938c-96312b4436ec {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.843099] env[69927]: INFO nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Neutron deleted interface d0780c40-8a68-4d93-938c-96312b4436ec; detaching it from the instance and deleting it from the info cache [ 871.843339] env[69927]: DEBUG nova.network.neutron [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.022713] env[69927]: INFO nova.compute.manager [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Unrescuing [ 872.023195] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock 
"refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.023435] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquired lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.023818] env[69927]: DEBUG nova.network.neutron [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.111258] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d079b8-9562-9d96-5d8e-8b98a7bca302, 'name': SearchDatastore_Task, 'duration_secs': 0.010342} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.111613] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.113305] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.113305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.113305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.113305] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.113305] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-16903801-fc86-4935-a411-56526c679ef0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.125440] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.125440] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.125440] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41db3e0d-6279-4520-bacb-bd054af0b054 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.129930] env[69927]: DEBUG nova.network.neutron [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Successfully updated port: 699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 872.137797] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 872.137797] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5245f76f-4d68-ebea-d749-f3c863381d44" [ 872.137797] env[69927]: _type = "Task" [ 872.137797] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.148557] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5245f76f-4d68-ebea-d749-f3c863381d44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.150480] env[69927]: DEBUG nova.scheduler.client.report [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 80 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 872.150480] env[69927]: DEBUG nova.compute.provider_tree [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 80 to 81 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 872.150480] env[69927]: DEBUG nova.compute.provider_tree [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.192983] env[69927]: DEBUG nova.compute.manager [req-6890a3ff-9b73-4a97-88dd-573be94f10e5 req-4f9d2a71-0ad1-4a51-a59c-8a3f65fd7aba service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-vif-plugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.193124] env[69927]: DEBUG oslo_concurrency.lockutils [req-6890a3ff-9b73-4a97-88dd-573be94f10e5 req-4f9d2a71-0ad1-4a51-a59c-8a3f65fd7aba service nova] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.193698] env[69927]: DEBUG oslo_concurrency.lockutils [req-6890a3ff-9b73-4a97-88dd-573be94f10e5 req-4f9d2a71-0ad1-4a51-a59c-8a3f65fd7aba service nova] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.193698] env[69927]: DEBUG oslo_concurrency.lockutils [req-6890a3ff-9b73-4a97-88dd-573be94f10e5 req-4f9d2a71-0ad1-4a51-a59c-8a3f65fd7aba service nova] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.193698] env[69927]: DEBUG nova.compute.manager [req-6890a3ff-9b73-4a97-88dd-573be94f10e5 req-4f9d2a71-0ad1-4a51-a59c-8a3f65fd7aba service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] No waiting events found dispatching network-vif-plugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 872.193874] env[69927]: WARNING nova.compute.manager [req-6890a3ff-9b73-4a97-88dd-573be94f10e5 req-4f9d2a71-0ad1-4a51-a59c-8a3f65fd7aba service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received unexpected event network-vif-plugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 for instance with vm_state building and task_state spawning. [ 872.205772] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095929, 'name': Rename_Task, 'duration_secs': 0.158903} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.206192] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.206368] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1473381-7653-403d-a48a-39b02442994d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.216377] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 872.216377] env[69927]: value = "task-4095930" [ 872.216377] env[69927]: _type = "Task" [ 872.216377] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.227092] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.346320] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b13de547-f270-445a-a848-d7a13900f7ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.357879] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d279b8-afdb-43e2-96f9-6b5e91fb2da5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.394678] env[69927]: DEBUG nova.compute.manager [req-5446a8c6-4bbf-46e6-8c46-56f78f5a184d req-39e22d91-48c4-4bcd-905f-cbb3e07b1c6f service nova] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Detach interface failed, port_id=d0780c40-8a68-4d93-938c-96312b4436ec, reason: Instance 80fc9add-683b-424e-9876-cdcae664e2da could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 872.633174] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.633428] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.633526] env[69927]: DEBUG nova.network.neutron [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.648918] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5245f76f-4d68-ebea-d749-f3c863381d44, 'name': SearchDatastore_Task, 'duration_secs': 0.012244} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.649801] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce2c1df8-aa5a-4f03-8836-bc1f190f13f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.656373] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.679s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.660731] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.113s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.662173] env[69927]: INFO nova.compute.claims [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.665975] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 872.665975] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5292f668-8958-ced6-59e1-c50796b506f4" [ 872.665975] env[69927]: _type = "Task" [ 872.665975] 
env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.676857] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5292f668-8958-ced6-59e1-c50796b506f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013009} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.677055] env[69927]: DEBUG oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.677192] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 872.677467] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fee512c8-1da1-46a4-b903-20b13f73b648 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.685441] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 872.685441] env[69927]: value = "task-4095931" [ 872.685441] env[69927]: _type = "Task" [ 872.685441] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.695433] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095931, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.728148] env[69927]: DEBUG oslo_vmware.api [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095930, 'name': PowerOnVM_Task, 'duration_secs': 0.504776} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.728437] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.728635] env[69927]: INFO nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Took 7.12 seconds to spawn the instance on the hypervisor. [ 872.728814] env[69927]: DEBUG nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 872.729881] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afed39a-9c9e-40c8-ab3c-30efa1765c89 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.733599] env[69927]: INFO nova.scheduler.client.report [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Deleted allocations for instance a4249857-6f60-4040-b676-d2d19dc83f15 [ 873.016665] env[69927]: DEBUG nova.network.neutron [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Updating instance_info_cache with network_info: [{"id": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "address": "fa:16:3e:06:ea:cd", "network": {"id": "f5504ce1-4b4d-41f8-90bc-de06d36f55a7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-747484643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5e5c81a6491a4090b807b7328df7d8ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d650b26-c3e7-4de7-98db-5e4b816d123a", "external-id": "nsx-vlan-transportzone-757", "segmentation_id": 757, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1455ee-4a", "ovs_interfaceid": "ff1455ee-4ab2-4e4d-ac72-6a6554002936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.199172] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095931, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.243482] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f13bd034-4edd-4645-bc2e-afe57589eabf tempest-ServerShowV247Test-118843938 tempest-ServerShowV247Test-118843938-project-member] Lock "a4249857-6f60-4040-b676-d2d19dc83f15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.436s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.264844] env[69927]: INFO nova.compute.manager [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Took 44.71 seconds to build instance. [ 873.288250] env[69927]: DEBUG nova.network.neutron [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.437439] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.437439] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.520915] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Releasing lock "refresh_cache-6be47dcb-ce00-4b81-9e69-35acabac046e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.520915] env[69927]: DEBUG nova.objects.instance [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lazy-loading 'flavor' on Instance uuid 6be47dcb-ce00-4b81-9e69-35acabac046e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.653458] env[69927]: DEBUG nova.network.neutron [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699de5cd-28", "ovs_interfaceid": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.698038] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095931, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.863063} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.698289] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.698550] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.699248] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c5a2248-b4b0-4bfd-8aca-727f4435dab8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.707697] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 873.707697] env[69927]: value = "task-4095932" [ 873.707697] env[69927]: _type = "Task" [ 873.707697] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.718801] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095932, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.768860] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acc5888d-c835-47f3-8f87-e0d308dd93ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.676s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.951133] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.951133] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.951247] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.955037] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.955037] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.955037] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.955037] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 873.955037] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.032803] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f02d099-b2c7-470c-a315-955865026444 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.068393] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.073431] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8056a2b-4494-4812-b32d-3c1e4d19a291 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.081411] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 874.081411] env[69927]: value = "task-4095933" [ 874.081411] env[69927]: _type = "Task" [ 874.081411] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.095719] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095933, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.156663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.157028] env[69927]: DEBUG nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Instance network_info: |[{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699de5cd-28", "ovs_interfaceid": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 874.157490] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:8d:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '510d3c47-3615-43d5-aa5d-a279fd915e71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '699de5cd-28f8-453d-8f0a-7856f2d6a2b0', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.165623] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating folder: Project (3b7ae5270b0643e6b5720d4f2f765d74). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.173585] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdf97eed-393c-4fab-982b-f78f9eea0714 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.202824] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created folder: Project (3b7ae5270b0643e6b5720d4f2f765d74) in parent group-v811283. [ 874.202824] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating folder: Instances. Parent ref: group-v811433. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.202824] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-184e9e46-8308-4e9f-809d-beb0a30261a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.204216] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created folder: Instances in parent group-v811433. [ 874.204533] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 874.207594] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.208095] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ff03290-2eca-48f5-b93c-3b460e4da856 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.243779] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095932, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.341538} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.245858] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.246245] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.246245] env[69927]: value = "task-4095936" [ 874.246245] env[69927]: _type = "Task" [ 874.246245] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.246973] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912d18cf-665b-49f5-9322-e9190111dfeb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.267320] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095936, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.280490] env[69927]: DEBUG nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 874.300052] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.303773] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-937371dd-4fd6-42c3-af6a-86ad624dee45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.328348] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 874.328348] env[69927]: value = "task-4095937" [ 874.328348] env[69927]: _type = "Task" [ 874.328348] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.339982] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.472418] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.596903] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095933, 'name': PowerOffVM_Task, 'duration_secs': 0.292913} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.597241] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.603304] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Reconfiguring VM instance instance-0000002e to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 874.604177] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9860cb34-2300-4e5a-ad2b-147dbc0163e4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.625886] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699d57cb-4cd2-4603-817c-a5f664a8be5b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.636739] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56c0c1e-1d4f-4fa3-847c-b97cee3ff67a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.640525] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 874.640525] env[69927]: value = "task-4095938" [ 874.640525] env[69927]: _type = "Task" [ 874.640525] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.682798] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a66eb07-4889-4edc-8afd-fed111cf4aa0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.692382] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095938, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.698990] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e251040-9e67-4a59-a275-6244a2ac0033 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.716446] env[69927]: DEBUG nova.compute.provider_tree [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.761266] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095936, 'name': CreateVM_Task, 'duration_secs': 0.391242} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.761512] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.762196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.762416] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.762749] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 874.763040] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f27ba8c-052f-429b-b0f7-a696adb68cdd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.768660] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 874.768660] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b4da3-97e0-8cec-ab50-9ee62d21bbcc" [ 874.768660] env[69927]: _type = "Task" [ 874.768660] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.778137] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b4da3-97e0-8cec-ab50-9ee62d21bbcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.817952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.836471] env[69927]: DEBUG nova.compute.manager [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.836723] env[69927]: DEBUG nova.compute.manager [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing instance network info cache due to event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 874.836941] env[69927]: DEBUG oslo_concurrency.lockutils [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] Acquiring lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.837098] env[69927]: DEBUG oslo_concurrency.lockutils [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] Acquired lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.837274] env[69927]: DEBUG nova.network.neutron [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.843340] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095937, 'name': ReconfigVM_Task, 'duration_secs': 0.374135} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.843923] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Reconfigured VM instance instance-00000031 to attach disk [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.844640] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba9f720d-7e42-4394-bbde-01b1d0567e5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.855510] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 874.855510] env[69927]: value = "task-4095939" [ 874.855510] env[69927]: _type = "Task" [ 874.855510] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.867103] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095939, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.152048] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095938, 'name': ReconfigVM_Task, 'duration_secs': 0.254179} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.152356] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Reconfigured VM instance instance-0000002e to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 875.152545] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.152795] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0956b698-dfed-4fa4-aa32-e295af111207 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.161049] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 875.161049] env[69927]: value = "task-4095940" [ 875.161049] env[69927]: _type = "Task" [ 875.161049] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.170140] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.280133] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527b4da3-97e0-8cec-ab50-9ee62d21bbcc, 'name': SearchDatastore_Task, 'duration_secs': 0.016795} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.281093] env[69927]: DEBUG nova.scheduler.client.report [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 875.281334] env[69927]: DEBUG nova.compute.provider_tree [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 81 to 82 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 875.281520] env[69927]: DEBUG nova.compute.provider_tree [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 875.285888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.285888] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Processing 
image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.285888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.286943] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.287173] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.287753] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd852060-30fd-4559-beab-6e34a7c13350 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.299920] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.299920] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.300688] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74659dea-0276-455c-b6dc-25dc4db5918d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.308909] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 875.308909] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525fa958-59d5-91f5-8b2f-cf4e63a54f94" [ 875.308909] env[69927]: _type = "Task" [ 875.308909] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.317315] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525fa958-59d5-91f5-8b2f-cf4e63a54f94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.369998] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095939, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.449504] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.449865] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.654504] env[69927]: DEBUG nova.network.neutron [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updated VIF entry in instance network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.655460] env[69927]: DEBUG nova.network.neutron [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699de5cd-28", "ovs_interfaceid": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.671417] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095940, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.791593] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.131s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.792171] env[69927]: DEBUG nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 875.796560] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.782s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.796560] env[69927]: DEBUG nova.objects.instance [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lazy-loading 'resources' on Instance uuid a36b06ca-77c8-4d2f-8b43-2c160fbac93f {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.819386] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525fa958-59d5-91f5-8b2f-cf4e63a54f94, 'name': SearchDatastore_Task, 'duration_secs': 0.014835} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.820228] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a3be36-541e-49a7-8f23-c649b212dde1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.827436] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 875.827436] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52357fab-0b0d-dbde-4126-40173952a161" [ 875.827436] env[69927]: _type = "Task" [ 875.827436] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.838515] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52357fab-0b0d-dbde-4126-40173952a161, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.867542] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095939, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.954654] env[69927]: DEBUG nova.compute.utils [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 876.158945] env[69927]: DEBUG oslo_concurrency.lockutils [req-a62b0dba-dbd0-412b-97bf-8bfcd6536978 req-05577c49-f038-4f58-afc9-27fa1cdd7d77 service nova] Releasing lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.174098] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095940, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.300105] env[69927]: DEBUG nova.compute.utils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 876.306022] env[69927]: DEBUG nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.306022] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.349387] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52357fab-0b0d-dbde-4126-40173952a161, 'name': SearchDatastore_Task, 'duration_secs': 0.026892} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.353312] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.353312] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.353678] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07e94fd5-75f0-48d4-9179-f2941f5ca014 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.363281] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 876.363281] env[69927]: value = "task-4095941" [ 876.363281] env[69927]: _type = "Task" [ 876.363281] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.372328] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095939, 'name': Rename_Task, 'duration_secs': 1.197063} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.375890] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.376539] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f548b04a-c2ba-46a9-9204-93ad58c2a510 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.383339] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095941, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.394302] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 876.394302] env[69927]: value = "task-4095942" [ 876.394302] env[69927]: _type = "Task" [ 876.394302] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.402668] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095942, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.448406] env[69927]: DEBUG nova.policy [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cae7aa9b1b2e4e8f9e8636fe513270f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6be96c1988054f0894a0b91881870c3c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 876.458277] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.542496] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "27e20d58-1150-4b90-b888-d84aff1954ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.542750] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "27e20d58-1150-4b90-b888-d84aff1954ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.678169] env[69927]: DEBUG oslo_vmware.api [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095940, 'name': PowerOnVM_Task, 'duration_secs': 1.302993} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.678169] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.678169] env[69927]: DEBUG nova.compute.manager [None req-4c976340-a7c8-498e-964d-b19a4755482b tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 876.678542] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a208eb34-136a-4c7a-89c4-d66ba23f978b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.806343] env[69927]: DEBUG nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 876.875110] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095941, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.913326] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095942, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.990058] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Successfully created port: fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.994647] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd34ef1-9be7-4e5c-ba89-8c9e49c5988d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.006208] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0f258c-b48f-4a9e-8f92-f89d862fe342 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.043773] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5aa54c-073f-4fcd-8af5-951ae5d5cc9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.053032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ace061-8c17-48cb-999f-74284b222a52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.071230] env[69927]: DEBUG nova.compute.provider_tree [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.374382] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.769275} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.374881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.375139] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.375460] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c71af33e-1bbf-4e0f-96b5-c6ad5e97eeed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.383338] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 877.383338] env[69927]: value = "task-4095943" [ 877.383338] env[69927]: _type = "Task" [ 877.383338] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.395372] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.404320] env[69927]: DEBUG oslo_vmware.api [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095942, 'name': PowerOnVM_Task, 'duration_secs': 0.608131} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.404647] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.404910] env[69927]: INFO nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Took 9.13 seconds to spawn the instance on the hypervisor. 
[ 877.405194] env[69927]: DEBUG nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 877.407902] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c46e641-546e-4295-aa29-615aeb1460c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.574738] env[69927]: DEBUG nova.scheduler.client.report [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.589753] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Successfully created port: b0d2f460-225f-47fe-96d1-bdefd018eac5 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.591977] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.592234] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.592456] env[69927]: INFO nova.compute.manager [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Attaching volume 9bd71aad-d455-47e7-a389-ca7f925cbe29 to /dev/sdb [ 877.658154] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97a6646-c62f-4553-8d68-3c9ae3fde43d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.666340] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb2e028-22ad-4d3c-b6b4-cdc6fbf2ac85 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.679992] env[69927]: DEBUG 
nova.virt.block_device [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Updating existing volume attachment record: 21669d9c-fda0-4af0-ade1-35d218a4fa44 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 877.821153] env[69927]: DEBUG nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 877.893732] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.894616] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.898461] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.898705] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.898863] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.899028] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.899258] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 
tempest-ServersTestMultiNic-1807665099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.899448] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.899586] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.899750] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.899925] env[69927]: DEBUG nova.virt.hardware [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.902156] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4abb7a-b6a7-4d75-91ef-b8f989b708e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.913858] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147011} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.916433] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.917294] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd7144a-5855-4d99-9a6f-4855ad6ac536 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.924249] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e879d2b1-39bf-4b89-99e6-e31abca00cc1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.940032] env[69927]: INFO nova.compute.manager [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Took 46.45 seconds to build instance. 
[ 877.974130] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.974628] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20a4cdc2-65bd-4554-aa75-34c248402693 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.998852] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 877.998852] env[69927]: value = "task-4095945" [ 877.998852] env[69927]: _type = "Task" [ 877.998852] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.008232] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095945, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.081667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.285s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.086320] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.112s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.088184] env[69927]: INFO nova.compute.claims [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.163134] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Successfully created port: e789eded-c761-41d1-90f9-0541da7d7635 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.194658] env[69927]: INFO nova.scheduler.client.report [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Deleted allocations for instance a36b06ca-77c8-4d2f-8b43-2c160fbac93f [ 878.442032] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-503ff1f2-5a45-4852-80c5-7699d24d401d tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.128s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.510419] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.710487] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2d44c12-7009-48c8-ba77-56b27ea05ad8 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.338s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.711610] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 36.762s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.711859] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.712108] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.712302] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.714428] env[69927]: INFO nova.compute.manager [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Terminating instance [ 878.946810] env[69927]: DEBUG 
nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 879.011323] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095945, 'name': ReconfigVM_Task, 'duration_secs': 0.661735} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.011634] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfigured VM instance instance-00000032 to attach disk [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.012252] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81e1e7b4-edae-4887-a15b-d5bc161d2dd7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.020419] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 879.020419] env[69927]: value = "task-4095948" [ 879.020419] env[69927]: _type = "Task" [ 879.020419] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.030669] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095948, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.056617] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "6be47dcb-ce00-4b81-9e69-35acabac046e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.056862] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.057149] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "6be47dcb-ce00-4b81-9e69-35acabac046e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.057362] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.057498] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.059771] env[69927]: INFO nova.compute.manager [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Terminating instance [ 879.218200] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.218385] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquired lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.218638] env[69927]: DEBUG nova.network.neutron [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab 
tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.287759] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Acquiring lock "8442f144-2be4-4634-b151-62f049a975b6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.287759] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lock "8442f144-2be4-4634-b151-62f049a975b6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.465589] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.530971] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095948, 'name': Rename_Task, 'duration_secs': 0.177679} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.532385] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.537977] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0524840f-12b6-4394-b9ed-3e61e2ca1db6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.546956] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 879.546956] env[69927]: value = "task-4095949" [ 879.546956] env[69927]: _type = "Task" [ 879.546956] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.559372] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095949, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.563585] env[69927]: DEBUG nova.compute.manager [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.563892] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.569813] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb867e38-8ce0-4663-8300-8c0e9c9c06d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.581455] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.581855] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86a78ae5-3b94-4750-a804-d738189c05b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.589925] env[69927]: DEBUG oslo_vmware.api [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 879.589925] env[69927]: value = "task-4095950" [ 879.589925] env[69927]: _type = "Task" [ 879.589925] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.604886] env[69927]: DEBUG oslo_vmware.api [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095950, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.664749] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88653e45-3887-4b51-87b9-8817b8362e9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.672793] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbede0f1-63e7-4bf8-8aed-44c4d1a27992 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.708476] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e99f3b8-e00e-453f-a065-44711734e1f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.717170] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e8a6cb-cc31-4ed9-b7e5-4cd06b3f9a27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.721808] env[69927]: DEBUG nova.compute.utils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Can not refresh info_cache because instance was not found {{(pid=69927) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 879.735076] env[69927]: DEBUG nova.compute.provider_tree [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.744449] env[69927]: DEBUG nova.network.neutron [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.791308] env[69927]: DEBUG nova.compute.utils [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 879.874492] env[69927]: DEBUG nova.network.neutron [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.008897] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Successfully updated port: fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.041594] env[69927]: INFO nova.compute.manager [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Rebuilding instance [ 880.061200] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095949, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.087497] env[69927]: DEBUG nova.compute.manager [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 880.088444] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975f2ba5-45a4-4b91-9bf3-e2e48950ae28 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.099828] env[69927]: DEBUG oslo_vmware.api [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095950, 'name': PowerOffVM_Task, 'duration_secs': 0.224082} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.101527] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 880.101717] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 880.104501] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71dd49f7-db6c-4acd-bc4b-bf53bab5cb07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.177062] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.177062] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.177062] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Deleting the datastore file [datastore2] 6be47dcb-ce00-4b81-9e69-35acabac046e {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.177283] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3f3526c-9964-46e6-9d06-be2a7a9345b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.184107] env[69927]: DEBUG oslo_vmware.api [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 880.184107] env[69927]: value = "task-4095952" [ 880.184107] env[69927]: _type = "Task" [ 880.184107] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.192748] env[69927]: DEBUG oslo_vmware.api [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095952, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.238634] env[69927]: DEBUG nova.scheduler.client.report [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.294214] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lock "8442f144-2be4-4634-b151-62f049a975b6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.377741] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Releasing lock "refresh_cache-a36b06ca-77c8-4d2f-8b43-2c160fbac93f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.378234] env[69927]: DEBUG nova.compute.manager [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 880.378444] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.378847] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d24d264-b64a-4a67-8a6e-2c0005eaf6dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.391111] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de4ddf8-014c-427b-9aa9-ded11a2fcd5a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.426770] env[69927]: WARNING nova.virt.vmwareapi.vmops [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a36b06ca-77c8-4d2f-8b43-2c160fbac93f could not be found. 
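The recurring 'Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... "released" ... held Ns' entries throughout this section (compute_resources, refresh_cache-<uuid>, per-instance build and terminate locks) are emitted by oslo.concurrency's lockutils wrappers around Nova's critical sections. A minimal sketch of the two usual forms, with an illustrative lock name and empty body rather than Nova's actual code:

# Sketch of the named-lock pattern behind the acquire/release log lines.
# Lock names and the work done under them are illustrative only.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Calls to this function are serialized within the process on the
    # named semaphore; entry and exit produce the acquired/released lines.
    pass

def refresh_cache(instance_uuid):
    # Ad-hoc critical section, analogous to the refresh_cache-<uuid>
    # acquire/release pairs seen above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass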
[ 880.426940] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.427143] env[69927]: INFO nova.compute.manager [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 880.427410] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.427693] env[69927]: DEBUG nova.compute.manager [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.427825] env[69927]: DEBUG nova.network.neutron [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.449520] env[69927]: DEBUG nova.network.neutron [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.557621] env[69927]: DEBUG oslo_vmware.api [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4095949, 'name': PowerOnVM_Task, 'duration_secs': 0.846703} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.557923] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.558139] env[69927]: INFO nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Took 9.56 seconds to spawn the instance on the hypervisor. 
[ 880.558322] env[69927]: DEBUG nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 880.559131] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9673b0d-8119-4bbe-a9c7-d1aa1886b024 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.695126] env[69927]: DEBUG oslo_vmware.api [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4095952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184312} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.695126] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.695462] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.695462] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.695579] env[69927]: INFO nova.compute.manager [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 880.695838] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.696056] env[69927]: DEBUG nova.compute.manager [-] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.696174] env[69927]: DEBUG nova.network.neutron [-] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.752105] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.752700] env[69927]: DEBUG nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 880.755731] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.363s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.758636] env[69927]: INFO nova.compute.claims [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.935297] env[69927]: DEBUG nova.compute.manager [req-db4a49b0-bf94-42e9-9b7c-c3389d89ecdf req-76a94d7e-40b4-4acf-b6cf-e4e4ecee5993 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-vif-plugged-fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.938621] env[69927]: DEBUG oslo_concurrency.lockutils [req-db4a49b0-bf94-42e9-9b7c-c3389d89ecdf req-76a94d7e-40b4-4acf-b6cf-e4e4ecee5993 service nova] Acquiring lock "480a672c-cb48-45e3-86bd-1741957a5124-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.938621] env[69927]: DEBUG oslo_concurrency.lockutils [req-db4a49b0-bf94-42e9-9b7c-c3389d89ecdf req-76a94d7e-40b4-4acf-b6cf-e4e4ecee5993 service nova] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.938621] env[69927]: DEBUG oslo_concurrency.lockutils [req-db4a49b0-bf94-42e9-9b7c-c3389d89ecdf req-76a94d7e-40b4-4acf-b6cf-e4e4ecee5993 service nova] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.938621] env[69927]: DEBUG nova.compute.manager [req-db4a49b0-bf94-42e9-9b7c-c3389d89ecdf req-76a94d7e-40b4-4acf-b6cf-e4e4ecee5993 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] No waiting events found dispatching network-vif-plugged-fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 880.938621] env[69927]: WARNING nova.compute.manager [req-db4a49b0-bf94-42e9-9b7c-c3389d89ecdf req-76a94d7e-40b4-4acf-b6cf-e4e4ecee5993 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received unexpected event network-vif-plugged-fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 for instance with vm_state building and task_state spawning. [ 880.953274] env[69927]: DEBUG nova.network.neutron [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.078504] env[69927]: INFO nova.compute.manager [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Took 47.85 seconds to build instance. [ 881.111284] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.111584] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f6b729c-6110-40da-be58-98c44b94d252 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.119433] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 881.119433] env[69927]: value = "task-4095954" [ 881.119433] env[69927]: _type = "Task" [ 881.119433] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.131620] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.264935] env[69927]: DEBUG nova.compute.utils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 881.270241] env[69927]: DEBUG nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Not allocating networking since 'none' was specified. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 881.419383] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Acquiring lock "8442f144-2be4-4634-b151-62f049a975b6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.419383] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lock "8442f144-2be4-4634-b151-62f049a975b6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.419383] env[69927]: INFO nova.compute.manager [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Attaching volume a42dd037-f6b1-4b77-ab7f-5045e67cd2b5 to /dev/sdb [ 881.458290] env[69927]: INFO nova.compute.manager [-] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Took 1.03 seconds to deallocate network for instance. [ 881.479107] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d2997b-b6b4-44e3-b2f8-3f6f53ff6df0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.486652] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9158f0a2-e45c-4fa6-a8fc-964934507c0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.501687] env[69927]: DEBUG nova.virt.block_device [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Updating existing volume attachment record: c359948e-55d2-4c3f-8f98-8d57ed437510 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 881.507404] env[69927]: DEBUG nova.network.neutron [-] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.580315] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fd9cd17d-3a27-44e3-8b8c-e3f901829dcd tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.850s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.631295] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095954, 'name': PowerOffVM_Task, 'duration_secs': 0.287681} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.631571] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.631806] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.632596] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33419cf-667d-40bc-a18f-f0b606c12ebb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.640091] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 881.640356] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a4e5764-e9f3-489d-8b87-826bd22b2c0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.717681] env[69927]: DEBUG nova.compute.manager [req-8de11a13-a9f2-4934-9aae-c73a807e17ae req-17247ab3-bc33-48c0-8e93-203c8f972b20 service nova] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Received event network-vif-deleted-ff1455ee-4ab2-4e4d-ac72-6a6554002936 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 881.728866] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.729202] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.729375] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleting the datastore file [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.729709] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-306ac81e-3199-42c8-85ed-acad241c3855 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.738079] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 
tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 881.738079] env[69927]: value = "task-4095959" [ 881.738079] env[69927]: _type = "Task" [ 881.738079] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.746495] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095959, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.776265] env[69927]: DEBUG nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 881.966896] env[69927]: INFO nova.compute.manager [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance disappeared during terminate [ 881.968337] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a445427-5cc8-4732-a1bc-25af3b8c91ab tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a36b06ca-77c8-4d2f-8b43-2c160fbac93f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.256s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.014499] env[69927]: INFO nova.compute.manager [-] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Took 1.31 seconds to deallocate network for instance. [ 882.087040] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.112747] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.113030] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.113270] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.113451] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.113613] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.118136] env[69927]: INFO nova.compute.manager [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Terminating instance [ 882.254516] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190027} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.254843] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.255052] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.255267] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.388660] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb69eda-4a4a-4d19-98ad-f114f4dd5d40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.396826] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494f7ca6-e734-4139-abec-469520daec5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.430188] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947a8645-f61a-439a-a045-7d56bb9c0be2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.439679] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5ad917-a4fc-40f4-9a41-f02f224b128b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.454976] env[69927]: DEBUG nova.compute.provider_tree [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.525694] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.608938] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.623685] env[69927]: DEBUG nova.compute.manager [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 
tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 882.623685] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 882.624578] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2a28c7-503a-4c04-aeb4-93b865572436 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.634626] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 882.634930] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-593ee6aa-0bf0-494b-9f80-aac63369cd79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.643792] env[69927]: DEBUG oslo_vmware.api [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 882.643792] env[69927]: value = "task-4095960" [ 882.643792] env[69927]: _type = "Task" [ 882.643792] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.654371] env[69927]: DEBUG oslo_vmware.api [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.741359] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Volume attach. 
Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 882.741604] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811437', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'name': 'volume-9bd71aad-d455-47e7-a389-ca7f925cbe29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '66ba8086-2dd4-4d02-aac3-1bbb4a404784', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'serial': '9bd71aad-d455-47e7-a389-ca7f925cbe29'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 882.742502] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37607e49-5c5c-4823-b732-9ddfef263d79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.766443] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c470f7-809f-421b-8c8e-e79d7168263e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.787066] env[69927]: DEBUG nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 882.798105] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] volume-9bd71aad-d455-47e7-a389-ca7f925cbe29/volume-9bd71aad-d455-47e7-a389-ca7f925cbe29.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.798710] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b772ee24-26c8-498e-af5f-86a2c686effd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.821719] env[69927]: DEBUG oslo_vmware.api [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 882.821719] env[69927]: value = "task-4095961" [ 882.821719] env[69927]: _type = "Task" [ 882.821719] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.832902] env[69927]: DEBUG oslo_vmware.api [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095961, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.858548] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 882.858806] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.858964] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 882.859304] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.859471] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 882.859620] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 882.859835] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 882.859993] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 882.860177] env[69927]: DEBUG 
nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 882.860347] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 882.860521] env[69927]: DEBUG nova.virt.hardware [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 882.861439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8aef5fc-03ff-4b2d-82b4-0168840d9d25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.870476] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72708bf0-8373-4bfa-adef-2c53e62a00c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.886712] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.892749] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Creating folder: Project (c921170bd19c45a0adee56911ade4d71). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.893114] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03c0a8a5-ff21-4980-875d-2b364b069a7b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.905582] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Created folder: Project (c921170bd19c45a0adee56911ade4d71) in parent group-v811283. [ 882.905798] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Creating folder: Instances. Parent ref: group-v811441. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.906104] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94ea2e7d-c1f5-4f1d-b36f-bb6ec723159d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.917205] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Created folder: Instances in parent group-v811441. [ 882.917530] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 882.917770] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.918038] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9faf8fca-fc47-4a4a-a935-3856a3cda26a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.938361] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.938361] env[69927]: value = "task-4095964" [ 882.938361] env[69927]: _type = "Task" [ 882.938361] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.947120] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095964, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.958436] env[69927]: DEBUG nova.scheduler.client.report [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.059389] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Successfully updated port: b0d2f460-225f-47fe-96d1-bdefd018eac5 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.157691] env[69927]: DEBUG oslo_vmware.api [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095960, 'name': PowerOffVM_Task, 'duration_secs': 0.220429} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.158211] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.158397] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.158674] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0472c39f-0025-444f-a972-bda07b158753 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.259931] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 883.260363] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 883.260628] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Deleting the datastore file [datastore2] a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.260929] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd151037-6f5f-45e7-a666-839e7f866758 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.273505] env[69927]: DEBUG oslo_vmware.api [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for the task: (returnval){ [ 883.273505] env[69927]: value = "task-4095966" [ 883.273505] env[69927]: _type = "Task" [ 883.273505] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.284460] env[69927]: DEBUG oslo_vmware.api [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095966, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.297130] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 883.297472] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.297548] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.297732] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.297881] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.298040] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 883.298258] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 883.298420] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 883.298586] env[69927]: 
DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 883.298991] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 883.298991] env[69927]: DEBUG nova.virt.hardware [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 883.299827] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d737634c-685a-4ff3-8f7a-325cd982b1a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.310638] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21770ac-2a65-4605-a8dd-05b251faec0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.326475] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:29:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b590e270-89a9-47f6-8e4e-69b428b381a8', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.335227] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.337842] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.338126] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caa1c5dc-30dc-4801-801f-c5e494bf0370 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.358776] env[69927]: DEBUG oslo_vmware.api [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095961, 'name': ReconfigVM_Task, 'duration_secs': 0.469443} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.360268] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Reconfigured VM instance instance-00000030 to attach disk [datastore1] volume-9bd71aad-d455-47e7-a389-ca7f925cbe29/volume-9bd71aad-d455-47e7-a389-ca7f925cbe29.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.365238] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.365238] env[69927]: value = "task-4095967" [ 883.365238] env[69927]: _type = "Task" [ 883.365238] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.366720] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6889438c-0163-414e-a344-06e8705af570 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.377975] env[69927]: DEBUG nova.compute.manager [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-changed-fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.378184] env[69927]: DEBUG nova.compute.manager [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Refreshing instance network info cache due to event network-changed-fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 883.378404] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] Acquiring lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.378546] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] Acquired lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.378705] env[69927]: DEBUG nova.network.neutron [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Refreshing network info cache for port fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.390683] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095967, 'name': CreateVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.392440] env[69927]: DEBUG oslo_vmware.api [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 883.392440] env[69927]: value = "task-4095968" [ 883.392440] env[69927]: _type = "Task" [ 883.392440] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.402762] env[69927]: DEBUG oslo_vmware.api [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.450348] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095964, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.465139] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.465139] env[69927]: DEBUG nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 883.468060] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.359s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.469795] env[69927]: INFO nova.compute.claims [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.787733] env[69927]: DEBUG oslo_vmware.api [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Task: {'id': task-4095966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183339} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.788179] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.788772] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 883.789543] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 883.789851] env[69927]: INFO nova.compute.manager [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 883.790216] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.790542] env[69927]: DEBUG nova.compute.manager [-] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 883.790654] env[69927]: DEBUG nova.network.neutron [-] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 883.889232] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095967, 'name': CreateVM_Task, 'duration_secs': 0.446281} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.889709] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.890400] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.890564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.890885] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 883.891164] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15052f9e-2a04-4591-ad0d-168db4157fcf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.898701] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 883.898701] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5283ca4f-6d8a-0fff-3b8f-510c5b8ed811" [ 883.898701] env[69927]: _type = "Task" [ 883.898701] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.905600] env[69927]: DEBUG oslo_vmware.api [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095968, 'name': ReconfigVM_Task, 'duration_secs': 0.217963} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.906316] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811437', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'name': 'volume-9bd71aad-d455-47e7-a389-ca7f925cbe29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '66ba8086-2dd4-4d02-aac3-1bbb4a404784', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'serial': '9bd71aad-d455-47e7-a389-ca7f925cbe29'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 883.917917] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5283ca4f-6d8a-0fff-3b8f-510c5b8ed811, 'name': SearchDatastore_Task, 'duration_secs': 0.010781} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.918615] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.918732] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.918992] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.919160] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.919343] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.919614] env[69927]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-4398f83b-6dc5-4f92-a1f6-9b11b484a9b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.930388] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.933029] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.933029] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fee42391-fa1b-44dd-8d0c-673dda9088d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.946171] env[69927]: DEBUG nova.network.neutron [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.948574] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 883.948574] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ab56c4-b728-a1a8-5cc4-b56a1d7bc3c8" [ 883.948574] env[69927]: _type = "Task" [ 883.948574] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.956619] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095964, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.964128] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ab56c4-b728-a1a8-5cc4-b56a1d7bc3c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011119} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.965774] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76dcec6a-f23c-4143-afb5-21abc0e24a0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.974027] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 883.974027] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e608e7-0a7d-9941-8349-efb04ab31c90" [ 883.974027] env[69927]: _type = "Task" [ 883.974027] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.976522] env[69927]: DEBUG nova.compute.utils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 883.979811] env[69927]: DEBUG nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 883.980141] env[69927]: DEBUG nova.network.neutron [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.988294] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e608e7-0a7d-9941-8349-efb04ab31c90, 'name': SearchDatastore_Task, 'duration_secs': 0.010806} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.989883] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.990290] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.990676] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea89d84c-48be-455b-b888-7a3f0a07f618 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.002602] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 884.002602] env[69927]: value = "task-4095970" [ 884.002602] env[69927]: _type = "Task" [ 884.002602] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.010414] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.053740] env[69927]: DEBUG nova.policy [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '380eaeedd7e740cba44bda875880dfc9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a40e3fd86af540ba921103886c84b280', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 884.088686] env[69927]: DEBUG nova.network.neutron [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.355687] env[69927]: DEBUG nova.compute.manager [req-52fcb081-bb32-4f7e-ac03-b1c0836906c4 req-b5abdfb2-473b-4d1f-a4b5-3f3860f81778 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Received event network-vif-deleted-5615148b-36c9-40b6-9282-76bdcfb9931e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.356046] env[69927]: INFO nova.compute.manager [req-52fcb081-bb32-4f7e-ac03-b1c0836906c4 req-b5abdfb2-473b-4d1f-a4b5-3f3860f81778 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Neutron deleted interface 5615148b-36c9-40b6-9282-76bdcfb9931e; detaching it from the instance and deleting it from the info cache [ 884.356217] env[69927]: DEBUG nova.network.neutron [req-52fcb081-bb32-4f7e-ac03-b1c0836906c4 req-b5abdfb2-473b-4d1f-a4b5-3f3860f81778 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.450373] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095964, 'name': CreateVM_Task, 'duration_secs': 1.322241} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.450607] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.451022] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.451197] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.451639] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 884.451902] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eb0d912-9c21-4f7e-8b4a-ed4884486717 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.457824] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 884.457824] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521f3861-694e-d63f-cce2-4c33a6a3c8ea" [ 884.457824] env[69927]: _type = "Task" [ 884.457824] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.467959] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521f3861-694e-d63f-cce2-4c33a6a3c8ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.481926] env[69927]: DEBUG nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 884.512338] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095970, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.591406] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] Releasing lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.591681] env[69927]: DEBUG nova.compute.manager [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-vif-plugged-b0d2f460-225f-47fe-96d1-bdefd018eac5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.591902] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] Acquiring lock "480a672c-cb48-45e3-86bd-1741957a5124-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.592263] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.592390] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.592486] env[69927]: DEBUG nova.compute.manager [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] No waiting events found dispatching network-vif-plugged-b0d2f460-225f-47fe-96d1-bdefd018eac5 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 884.592658] env[69927]: WARNING nova.compute.manager [req-1d1ef733-8a4e-40ff-a1f2-31d4da6e04b7 req-ea672608-36fd-46c9-800e-835722f26b16 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received unexpected event network-vif-plugged-b0d2f460-225f-47fe-96d1-bdefd018eac5 for instance with vm_state building and task_state spawning. 
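The entries above repeatedly show the VMware driver's task pattern against vCenter: a SOAP method that returns a Task moref is invoked (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, ReconfigVM_Task, CreateVM_Task), and oslo.vmware then polls it until completion, producing the "Waiting for the task ... progress is N% ... completed successfully" lines. The following is a minimal sketch of that invoke-then-poll pattern using the oslo.vmware session API only; the vCenter host, credentials, and reuse of the instance UUID from the log are placeholders for illustration, and this is not Nova's own code.

```python
# Minimal sketch of the invoke-then-poll pattern visible in the log:
# look up a VM by instance UUID (as SearchIndex.FindAllByUuid does below),
# start PowerOffVM_Task, and let wait_for_task poll it to completion.
# Host, credentials, and the UUID are placeholders; error handling is omitted.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',                 # vCenter host (placeholder)
    'administrator@vsphere.local',     # placeholder credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5,
)

# FindAllByUuid on the SearchIndex, the same lookup the driver logs above.
search_index = session.vim.service_content.searchIndex
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid', search_index,
    uuid='a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5',
    vmSearch=True, instanceUuid=True)

if vm_refs:
    vm_ref = vm_refs[0]
    # PowerOffVM_Task returns a Task moref; wait_for_task polls it, which is
    # what the "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is 0%"
    # entries correspond to.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)
```

The per-request "Invoking X.Y with opID=oslo.vmware-..." debug lines are emitted inside invoke_api; the polling interval between the "progress is N%" lines corresponds to task_poll_interval.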
[ 884.688022] env[69927]: DEBUG nova.network.neutron [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Successfully created port: 0a7e52ca-6c1d-456c-a839-ce5f626c210d {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.809457] env[69927]: DEBUG nova.network.neutron [-] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.865990] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-663dd703-2ee0-418b-a9a6-200b9fbeb38b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.878850] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a961b78-a4a8-4d83-9bf5-86b82e52a793 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.927896] env[69927]: DEBUG nova.compute.manager [req-52fcb081-bb32-4f7e-ac03-b1c0836906c4 req-b5abdfb2-473b-4d1f-a4b5-3f3860f81778 service nova] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Detach interface failed, port_id=5615148b-36c9-40b6-9282-76bdcfb9931e, reason: Instance a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 884.975447] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521f3861-694e-d63f-cce2-4c33a6a3c8ea, 'name': SearchDatastore_Task, 'duration_secs': 0.022566} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.978733] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.979124] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.979414] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.979633] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.979922] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.980498] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9541a852-922b-4342-949f-e0a67ffc1ba3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.997386] env[69927]: DEBUG nova.objects.instance [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'flavor' on Instance uuid 66ba8086-2dd4-4d02-aac3-1bbb4a404784 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 884.999310] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.999625] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.004088] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a0a8be4-ea45-4b92-995e-3befe77bc34e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.020966] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 885.020966] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52041754-c10f-ff71-2e55-0949771031fe" [ 885.020966] env[69927]: _type = "Task" [ 885.020966] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.027023] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095970, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.039711] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52041754-c10f-ff71-2e55-0949771031fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.184829] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2ad8ba-1cd7-4804-bb8e-980c571aaddc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.196091] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4649010c-e163-4359-9d58-1b3ba6f78b8d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.233724] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946e8c24-0788-465a-a362-15546dcd4382 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.249199] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4015147d-6133-48c3-92e3-cfc5696dcceb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.261254] env[69927]: DEBUG nova.compute.provider_tree [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.313750] env[69927]: INFO nova.compute.manager [-] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Took 1.52 seconds to deallocate network for instance. 
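The sequence from 884.978 onward shows the per-image serialization around the datastore image cache: the handler acquires the lock named after the cached VMDK path, creates the devstack-image-cache_base directory via FileManager.MakeDirectory if it is missing, searches the datastore for the cached image, and (further down) copies the VMDK and releases the lock. A rough sketch of that serialization pattern with oslo.concurrency, using hypothetical helper names (cache_path, fetch_image) and lock arguments that this log does not show, could look like:

    from oslo_concurrency import lockutils

    def ensure_cached_image(cache_path, image_id, fetch_image):
        # One lock per cached image: concurrent spawns using the same image
        # queue up here instead of copying the VMDK into the cache twice.
        # (The real call also uses the external/file-lock flavour that
        # produces the "Acquired external semaphore" line; omitted here.)
        lock_name = '%s/%s' % (cache_path, image_id)
        with lockutils.lock(lock_name):
            return fetch_image(image_id)
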
[ 885.495690] env[69927]: DEBUG nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 885.506845] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d2435e0e-9ca7-4a93-a0fd-8a065a7d47ee tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.914s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.526400] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095970, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.187299} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.529560] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.529782] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.533835] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c55389e-7d60-4aa1-abcb-241971779dcd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.543981] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52041754-c10f-ff71-2e55-0949771031fe, 'name': SearchDatastore_Task, 'duration_secs': 0.06866} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.547438] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 885.547438] env[69927]: value = "task-4095971" [ 885.547438] env[69927]: _type = "Task" [ 885.547438] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.549849] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd874aef-89d6-4075-9bc6-9a99179d07aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.562193] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 885.563425] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.563425] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.563425] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.563425] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.563425] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 885.563590] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 885.563768] env[69927]: DEBUG nova.virt.hardware [None 
req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 885.564036] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 885.564264] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 885.564470] env[69927]: DEBUG nova.virt.hardware [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 885.565388] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751a2556-b8f4-43be-92e4-51c4fef4b724 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.572931] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 885.572931] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52802d6e-5860-cb29-29b0-68b27f593c2f" [ 885.572931] env[69927]: _type = "Task" [ 885.572931] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.573157] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095971, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.582606] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b04bf91-f76f-41eb-bf7e-84c098faa112 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.590735] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52802d6e-5860-cb29-29b0-68b27f593c2f, 'name': SearchDatastore_Task, 'duration_secs': 0.014193} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.591412] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.591669] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.591934] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bada01e-45b4-413d-9490-ea151dad8f5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.607940] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 885.607940] env[69927]: value = "task-4095972" [ 885.607940] env[69927]: _type = "Task" [ 885.607940] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.613764] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.613888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.614140] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.614342] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.614516] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.622508] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095972, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.622825] env[69927]: INFO nova.compute.manager [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Terminating instance [ 885.626586] env[69927]: DEBUG nova.compute.manager [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-changed-b0d2f460-225f-47fe-96d1-bdefd018eac5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.626769] env[69927]: DEBUG nova.compute.manager [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Refreshing instance network info cache due to event network-changed-b0d2f460-225f-47fe-96d1-bdefd018eac5. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 885.626983] env[69927]: DEBUG oslo_concurrency.lockutils [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] Acquiring lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.627142] env[69927]: DEBUG oslo_concurrency.lockutils [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] Acquired lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.627287] env[69927]: DEBUG nova.network.neutron [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Refreshing network info cache for port b0d2f460-225f-47fe-96d1-bdefd018eac5 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.664819] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Successfully updated port: e789eded-c761-41d1-90f9-0541da7d7635 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 885.765225] env[69927]: DEBUG nova.scheduler.client.report [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.820194] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.057812] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Volume attach. 
Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 886.058964] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811440', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'name': 'volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8442f144-2be4-4634-b151-62f049a975b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'serial': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 886.058964] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852377b6-4d03-4428-88df-0c22eaab1223 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.067993] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095971, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083804} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.080175] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.081137] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e31ac5-8110-41bb-b79b-d16e6354190c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.084270] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b3fd3f-7ddf-4445-85f4-8dd4d8367081 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.135552] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.143741] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5/volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5.vmdk or 
device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.146344] env[69927]: DEBUG nova.compute.manager [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 886.146621] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.146892] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a2c9d7c-0b40-4c80-85f4-f82c234e558d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.164378] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-142fa916-a71a-4df3-b4c8-20351d46d2a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.177211] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d14a57b-20c8-4f04-9569-1626391b5dac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.179152] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.186562] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095972, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.190753] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Waiting for the task: (returnval){ [ 886.190753] env[69927]: value = "task-4095973" [ 886.190753] env[69927]: _type = "Task" [ 886.190753] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.191203] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 886.191203] env[69927]: value = "task-4095974" [ 886.191203] env[69927]: _type = "Task" [ 886.191203] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.191406] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 886.191406] env[69927]: value = "task-4095975" [ 886.191406] env[69927]: _type = "Task" [ 886.191406] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.213178] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4095973, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.220771] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095974, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.221099] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095975, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.242955] env[69927]: DEBUG nova.network.neutron [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.274591] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.806s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.275554] env[69927]: DEBUG nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.279363] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.196s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.281910] env[69927]: INFO nova.compute.claims [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.545584] env[69927]: DEBUG nova.network.neutron [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.650547] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095972, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.015611} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.650847] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.651089] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.651380] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40cb11b3-5321-445f-a628-65e76d15b9b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.660275] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 886.660275] env[69927]: value = "task-4095976" [ 886.660275] env[69927]: _type = "Task" [ 886.660275] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.672489] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095976, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.714738] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095974, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.714738] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095975, 'name': PowerOffVM_Task, 'duration_secs': 0.432344} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.719445] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 886.719445] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 886.719734] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811437', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'name': 'volume-9bd71aad-d455-47e7-a389-ca7f925cbe29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '66ba8086-2dd4-4d02-aac3-1bbb4a404784', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'serial': '9bd71aad-d455-47e7-a389-ca7f925cbe29'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 886.720518] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4095973, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.721125] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f87630-e51a-46e8-8dcd-f461e8ebcdce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.745948] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8feb8b-4991-40ed-8ebf-b7f58db09b64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.754204] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267bfbe3-904e-4061-a85b-c8867ebe5c56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.777456] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb63a8c0-7819-4d1c-8497-24316b2eeff5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.797418] env[69927]: DEBUG nova.compute.utils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.800820] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] The volume has not been displaced from its original location: [datastore1] volume-9bd71aad-d455-47e7-a389-ca7f925cbe29/volume-9bd71aad-d455-47e7-a389-ca7f925cbe29.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 886.807578] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Reconfiguring VM instance instance-00000030 to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 886.807578] env[69927]: DEBUG nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.807578] env[69927]: DEBUG nova.network.neutron [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.809264] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b03b7e31-dd2b-4278-930f-b78843c18439 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.832386] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 886.832386] env[69927]: value = "task-4095977" [ 886.832386] env[69927]: _type = "Task" [ 886.832386] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.843479] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095977, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.916057] env[69927]: DEBUG nova.policy [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5187d174055c4ccaa5c805ac2cad225f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81b92defba9241bca7e1db3e91030712', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 887.048868] env[69927]: DEBUG oslo_concurrency.lockutils [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] Releasing lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.049230] env[69927]: DEBUG nova.compute.manager [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.049455] env[69927]: DEBUG nova.compute.manager [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing instance network info cache due to event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 887.049735] env[69927]: DEBUG oslo_concurrency.lockutils [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] Acquiring lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.049877] env[69927]: DEBUG oslo_concurrency.lockutils [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] Acquired lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.050167] env[69927]: DEBUG nova.network.neutron [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.051438] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.051586] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.061627] env[69927]: DEBUG nova.network.neutron [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Successfully updated port: 0a7e52ca-6c1d-456c-a839-ce5f626c210d {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 887.172916] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.207079} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.173327] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.174210] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8feed0a-6d64-436d-a70f-97d8f921d224 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.204170] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.204885] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-802f2cc7-aa50-4919-b2a3-84e27c8ea003 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.235050] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4095973, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.239373] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 887.239373] env[69927]: value = "task-4095978" [ 887.239373] env[69927]: _type = "Task" [ 887.239373] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.239747] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095974, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.251818] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095978, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.307992] env[69927]: DEBUG nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.346673] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095977, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.563636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "refresh_cache-aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.563891] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquired lock "refresh_cache-aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.563891] env[69927]: DEBUG nova.network.neutron [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.623211] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.746409] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095974, 'name': ReconfigVM_Task, 'duration_secs': 1.169595} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.746902] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4095973, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.747717] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Reconfigured VM instance instance-00000031 to attach disk [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9/b67630a4-2c1a-440b-af82-80c908ffa6e9.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.763217] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-460a534e-b66e-429f-8185-d198ceb7b93c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.776032] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.780417] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 887.780417] env[69927]: value = "task-4095979" [ 887.780417] env[69927]: _type = "Task" [ 887.780417] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.798785] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-vif-plugged-e789eded-c761-41d1-90f9-0541da7d7635 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.799645] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Acquiring lock "480a672c-cb48-45e3-86bd-1741957a5124-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.799965] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.800236] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.800495] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] No waiting events found dispatching 
network-vif-plugged-e789eded-c761-41d1-90f9-0541da7d7635 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 887.800735] env[69927]: WARNING nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received unexpected event network-vif-plugged-e789eded-c761-41d1-90f9-0541da7d7635 for instance with vm_state building and task_state spawning. [ 887.800977] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-changed-e789eded-c761-41d1-90f9-0541da7d7635 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.802544] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Refreshing instance network info cache due to event network-changed-e789eded-c761-41d1-90f9-0541da7d7635. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 887.802544] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Acquiring lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.809330] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095979, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.844855] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095977, 'name': ReconfigVM_Task, 'duration_secs': 0.686209} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.851520] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Reconfigured VM instance instance-00000030 to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 887.864150] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b47d993d-9127-4559-ae22-3169d7ada284 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.875651] env[69927]: DEBUG nova.network.neutron [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Successfully created port: 703aee39-6cbb-4cc4-8043-e405ac713dda {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.889710] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 887.889710] env[69927]: value = "task-4095980" [ 887.889710] env[69927]: _type = "Task" [ 887.889710] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.900419] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095980, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.064718] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdf8ccd-c937-41af-b692-d16cd34d3df4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.075842] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dc9dd9-a29d-4d44-9485-d9681e0788f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.117468] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147856e9-16f6-464b-a24b-0575062fc725 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.120943] env[69927]: DEBUG nova.network.neutron [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.130992] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d337277-9755-40b3-937a-af968e78229c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.147327] env[69927]: DEBUG nova.compute.provider_tree [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.218083] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4095973, 'name': ReconfigVM_Task, 'duration_secs': 1.760351} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.218404] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Reconfigured VM instance instance-0000000f to attach disk [datastore1] volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5/volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.224223] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b461fc8f-284f-4e23-a312-95458b7eeaa5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.244176] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Waiting for the task: (returnval){ [ 888.244176] env[69927]: value = "task-4095981" [ 888.244176] env[69927]: _type = "Task" [ 888.244176] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.261727] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4095981, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.266613] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.299895] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095979, 'name': Rename_Task, 'duration_secs': 0.191804} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.300374] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.300570] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea95cb3f-9890-45e2-9f64-fe3aaf3b73b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.309162] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 888.309162] env[69927]: value = "task-4095982" [ 888.309162] env[69927]: _type = "Task" [ 888.309162] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.318700] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.322110] env[69927]: DEBUG nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.333529] env[69927]: DEBUG nova.network.neutron [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Updating instance_info_cache with network_info: [{"id": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "address": "fa:16:3e:2a:51:b2", "network": {"id": "214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa4af0b2-0d", "ovs_interfaceid": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b0d2f460-225f-47fe-96d1-bdefd018eac5", "address": "fa:16:3e:a9:6c:9c", "network": {"id": "db4ac49a-9497-49a6-9ca7-d4c93b807d7a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-757684927", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d2f460-22", "ovs_interfaceid": "b0d2f460-225f-47fe-96d1-bdefd018eac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e789eded-c761-41d1-90f9-0541da7d7635", "address": "fa:16:3e:41:c2:a4", "network": {"id": "214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": 
"nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape789eded-c7", "ovs_interfaceid": "e789eded-c761-41d1-90f9-0541da7d7635", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.362945] env[69927]: DEBUG nova.network.neutron [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Updating instance_info_cache with network_info: [{"id": "0a7e52ca-6c1d-456c-a839-ce5f626c210d", "address": "fa:16:3e:5e:10:ad", "network": {"id": "183df4de-622d-41f7-941d-f44276468dcd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1089604059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a40e3fd86af540ba921103886c84b280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a7e52ca-6c", "ovs_interfaceid": "0a7e52ca-6c1d-456c-a839-ce5f626c210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.389176] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.389546] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.389725] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 
tempest-VolumesAdminNegativeTest-1491739297-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.390014] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.390086] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.390252] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.390448] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.390618] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.390795] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.390993] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.391199] env[69927]: DEBUG nova.virt.hardware [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.392424] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07f3c35-b914-4f64-8810-acd5cbe26072 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.405408] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095980, 'name': ReconfigVM_Task, 'duration_secs': 0.163946} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.408663] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811437', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'name': 'volume-9bd71aad-d455-47e7-a389-ca7f925cbe29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '66ba8086-2dd4-4d02-aac3-1bbb4a404784', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd71aad-d455-47e7-a389-ca7f925cbe29', 'serial': '9bd71aad-d455-47e7-a389-ca7f925cbe29'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 888.408918] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.413461] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226b581f-2bb3-428b-848e-01b7e97047ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.415817] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5110247-4047-44fe-8cdb-cb208c058aa9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.433936] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.434232] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d40abcd-3e8e-48a8-a4f5-04ac170dc811 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.512582] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.512880] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.513261] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleting the datastore file [datastore2] 66ba8086-2dd4-4d02-aac3-1bbb4a404784 {{(pid=69927) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.513654] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fd90534-b01e-44fe-a69a-122c1666809b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.523097] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 888.523097] env[69927]: value = "task-4095984" [ 888.523097] env[69927]: _type = "Task" [ 888.523097] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.533270] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095984, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.535584] env[69927]: DEBUG nova.network.neutron [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updated VIF entry in instance network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.535584] env[69927]: DEBUG nova.network.neutron [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699de5cd-28", "ovs_interfaceid": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.652331] env[69927]: DEBUG nova.scheduler.client.report [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.757653] env[69927]: DEBUG oslo_vmware.api [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4095981, 'name': ReconfigVM_Task, 'duration_secs': 0.200762} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.758483] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811440', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'name': 'volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8442f144-2be4-4634-b151-62f049a975b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'serial': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 888.763475] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095978, 'name': ReconfigVM_Task, 'duration_secs': 1.332371} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.764118] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Reconfigured VM instance instance-00000034 to attach disk [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.764894] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd21f9c9-b84a-4362-9b5e-5cb2408f32d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.772974] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 888.772974] env[69927]: value = "task-4095985" [ 888.772974] env[69927]: _type = "Task" [ 888.772974] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.783262] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095985, 'name': Rename_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.820237] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095982, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.838062] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Releasing lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.838310] env[69927]: DEBUG nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Instance network_info: |[{"id": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "address": "fa:16:3e:2a:51:b2", "network": {"id": "214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa4af0b2-0d", "ovs_interfaceid": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b0d2f460-225f-47fe-96d1-bdefd018eac5", "address": "fa:16:3e:a9:6c:9c", "network": {"id": "db4ac49a-9497-49a6-9ca7-d4c93b807d7a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-757684927", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d2f460-22", "ovs_interfaceid": "b0d2f460-225f-47fe-96d1-bdefd018eac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e789eded-c761-41d1-90f9-0541da7d7635", "address": "fa:16:3e:41:c2:a4", "network": {"id": 
"214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape789eded-c7", "ovs_interfaceid": "e789eded-c761-41d1-90f9-0541da7d7635", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 888.838715] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Acquired lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.838994] env[69927]: DEBUG nova.network.neutron [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Refreshing network info cache for port e789eded-c761-41d1-90f9-0541da7d7635 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.840759] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:51:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd177c5b3-a5b1-4c78-854e-7e0dbf341ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:6c:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d413776-9a8c-4afd-856f-10dbb062ca95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0d2f460-225f-47fe-96d1-bdefd018eac5', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:c2:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd177c5b3-a5b1-4c78-854e-7e0dbf341ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e789eded-c761-41d1-90f9-0541da7d7635', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.859797] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Creating folder: Project (6be96c1988054f0894a0b91881870c3c). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.861755] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f679f8b-9aec-4026-89d8-e0369b4bcbce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.865887] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Releasing lock "refresh_cache-aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.866101] env[69927]: DEBUG nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Instance network_info: |[{"id": "0a7e52ca-6c1d-456c-a839-ce5f626c210d", "address": "fa:16:3e:5e:10:ad", "network": {"id": "183df4de-622d-41f7-941d-f44276468dcd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1089604059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a40e3fd86af540ba921103886c84b280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a7e52ca-6c", "ovs_interfaceid": "0a7e52ca-6c1d-456c-a839-ce5f626c210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 888.867615] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:10:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a7e52ca-6c1d-456c-a839-ce5f626c210d', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.878466] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Creating folder: Project (a40e3fd86af540ba921103886c84b280). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.881082] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb6d32cb-2332-417e-91d0-a4fd9875f8ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.883963] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Created folder: Project (6be96c1988054f0894a0b91881870c3c) in parent group-v811283. [ 888.884252] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Creating folder: Instances. Parent ref: group-v811445. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.884581] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3b51fe2-52bb-4b00-90a2-0e8a9f3a4063 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.898414] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Created folder: Project (a40e3fd86af540ba921103886c84b280) in parent group-v811283. [ 888.898716] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Creating folder: Instances. Parent ref: group-v811446. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.900865] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a0e3ec3-244e-455a-9323-b86e4afad506 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.903241] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Created folder: Instances in parent group-v811445. [ 888.903535] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 888.903820] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.904640] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2993499-52ca-4734-a926-a0f14e004494 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.938037] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Created folder: Instances in parent group-v811446. 
[ 888.938037] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 888.938341] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.938900] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c79fd1a-c12d-41ff-8431-dda8ca7a9afd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.959336] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.959336] env[69927]: value = "task-4095990" [ 888.959336] env[69927]: _type = "Task" [ 888.959336] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.966330] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.966330] env[69927]: value = "task-4095991" [ 888.966330] env[69927]: _type = "Task" [ 888.966330] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.970386] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095990, 'name': CreateVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.979720] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095991, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.036635] env[69927]: DEBUG oslo_vmware.api [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4095984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159288} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.037954] env[69927]: DEBUG oslo_concurrency.lockutils [req-b5603f6a-118e-4647-963e-56416bd364ce req-2c7aee3f-8bfb-4cbc-9acc-343d94c47b7c service nova] Releasing lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.037954] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.037954] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.037954] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.038207] env[69927]: INFO nova.compute.manager [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Took 2.89 seconds to destroy the instance on the hypervisor. [ 889.038379] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 889.038634] env[69927]: DEBUG nova.compute.manager [-] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 889.038708] env[69927]: DEBUG nova.network.neutron [-] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 889.160700] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.881s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.161334] env[69927]: DEBUG nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 889.164527] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.567s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.165472] env[69927]: DEBUG nova.objects.instance [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lazy-loading 'resources' on Instance uuid 8de4160d-2282-4ed3-bdf0-349445a6eab8 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.289282] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095985, 'name': Rename_Task, 'duration_secs': 0.16446} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.292053] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.292053] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a15c8141-ddec-48eb-a5e3-babcb0ecb1c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.299229] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 889.299229] env[69927]: value = "task-4095992" [ 889.299229] env[69927]: _type = "Task" [ 889.299229] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.308647] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.322278] env[69927]: DEBUG oslo_vmware.api [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4095982, 'name': PowerOnVM_Task, 'duration_secs': 0.530544} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.322621] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.322845] env[69927]: DEBUG nova.compute.manager [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 889.323857] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a469dd29-c6fd-46b0-a315-5d6bc71a0705 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.472105] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095990, 'name': CreateVM_Task, 'duration_secs': 0.525049} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.474926] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.478135] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.478323] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.478649] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 889.479354] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-712dad45-0ab8-465f-b262-53014b5e2215 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.484704] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4095991, 'name': CreateVM_Task, 'duration_secs': 0.397457} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.485222] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.485965] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.486151] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.486470] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 889.486732] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ef0c117-4681-4653-83b8-0e139b3786b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.489896] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 889.489896] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ec0000-7bfe-c95a-c66d-13b1b9804847" [ 889.489896] env[69927]: _type = "Task" [ 889.489896] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.496231] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 889.496231] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528ab36f-daad-2b9f-9aba-0858c8a7b561" [ 889.496231] env[69927]: _type = "Task" [ 889.496231] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.505575] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ec0000-7bfe-c95a-c66d-13b1b9804847, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.511331] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528ab36f-daad-2b9f-9aba-0858c8a7b561, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.634088] env[69927]: DEBUG nova.network.neutron [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Updated VIF entry in instance network info cache for port e789eded-c761-41d1-90f9-0541da7d7635. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.634088] env[69927]: DEBUG nova.network.neutron [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Updating instance_info_cache with network_info: [{"id": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "address": "fa:16:3e:2a:51:b2", "network": {"id": "214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa4af0b2-0d", "ovs_interfaceid": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b0d2f460-225f-47fe-96d1-bdefd018eac5", "address": "fa:16:3e:a9:6c:9c", "network": {"id": "db4ac49a-9497-49a6-9ca7-d4c93b807d7a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-757684927", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d2f460-22", "ovs_interfaceid": "b0d2f460-225f-47fe-96d1-bdefd018eac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e789eded-c761-41d1-90f9-0541da7d7635", 
"address": "fa:16:3e:41:c2:a4", "network": {"id": "214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape789eded-c7", "ovs_interfaceid": "e789eded-c761-41d1-90f9-0541da7d7635", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.671557] env[69927]: DEBUG nova.compute.utils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 889.674956] env[69927]: DEBUG nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.674956] env[69927]: DEBUG nova.network.neutron [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 889.741149] env[69927]: DEBUG nova.policy [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd1a4b19b9874a17bde997440649c7e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c5a402c4ef2452b9809e30a2fe91431', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.812921] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095992, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.817605] env[69927]: DEBUG nova.objects.instance [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lazy-loading 'flavor' on Instance uuid 8442f144-2be4-4634-b151-62f049a975b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.848257] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.939562] env[69927]: DEBUG nova.compute.manager [req-8b811a31-bab9-4f3f-93f4-68b97175087e req-e1581c86-e1e7-46aa-83c7-d681131c2de4 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Received event network-vif-deleted-0a3c7cb5-f42d-407f-8561-e9c2695bced8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.939851] env[69927]: INFO nova.compute.manager [req-8b811a31-bab9-4f3f-93f4-68b97175087e req-e1581c86-e1e7-46aa-83c7-d681131c2de4 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Neutron deleted interface 0a3c7cb5-f42d-407f-8561-e9c2695bced8; detaching it from the instance and deleting it from the info cache [ 889.940072] env[69927]: DEBUG nova.network.neutron [req-8b811a31-bab9-4f3f-93f4-68b97175087e req-e1581c86-e1e7-46aa-83c7-d681131c2de4 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.009024] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ec0000-7bfe-c95a-c66d-13b1b9804847, 'name': SearchDatastore_Task, 'duration_secs': 0.013418} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.009024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.009569] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.009569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.009569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.010047] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.010557] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b718603c-e411-451b-9804-3667d4cddb5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.017182] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528ab36f-daad-2b9f-9aba-0858c8a7b561, 'name': SearchDatastore_Task, 'duration_secs': 0.02395} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.017659] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.017897] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.018138] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.018286] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.018466] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.018723] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86397066-48b5-4d52-937e-0a10fde5f666 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.026869] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.027081] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.027788] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a351104-1828-40b0-8335-fcadf402c18b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.032955] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.033157] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.037358] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7399ab02-3ce9-469e-bd6e-7aabbaeb5f0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.039602] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 890.039602] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5290d3a1-6e54-95c9-81c1-c8a3b5cf6800" [ 890.039602] env[69927]: _type = "Task" [ 890.039602] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.047319] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 890.047319] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523978b2-8c33-9789-e123-9f66b88e82bb" [ 890.047319] env[69927]: _type = "Task" [ 890.047319] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.056883] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5290d3a1-6e54-95c9-81c1-c8a3b5cf6800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.065603] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523978b2-8c33-9789-e123-9f66b88e82bb, 'name': SearchDatastore_Task, 'duration_secs': 0.019238} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.066486] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7970e46e-754b-4e88-a041-306353da9c0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.073358] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 890.073358] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f58dc8-0516-edb9-6f61-b88fb6b95511" [ 890.073358] env[69927]: _type = "Task" [ 890.073358] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.083144] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f58dc8-0516-edb9-6f61-b88fb6b95511, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.137369] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Releasing lock "refresh_cache-480a672c-cb48-45e3-86bd-1741957a5124" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.137580] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Received event network-vif-plugged-0a7e52ca-6c1d-456c-a839-ce5f626c210d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.137752] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Acquiring lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.137968] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.138148] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.138319] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] No waiting events found dispatching network-vif-plugged-0a7e52ca-6c1d-456c-a839-ce5f626c210d {{(pid=69927) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 890.138501] env[69927]: WARNING nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Received unexpected event network-vif-plugged-0a7e52ca-6c1d-456c-a839-ce5f626c210d for instance with vm_state building and task_state spawning. [ 890.138666] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Received event network-changed-0a7e52ca-6c1d-456c-a839-ce5f626c210d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.138820] env[69927]: DEBUG nova.compute.manager [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Refreshing instance network info cache due to event network-changed-0a7e52ca-6c1d-456c-a839-ce5f626c210d. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 890.139024] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Acquiring lock "refresh_cache-aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.139857] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Acquired lock "refresh_cache-aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.139857] env[69927]: DEBUG nova.network.neutron [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Refreshing network info cache for port 0a7e52ca-6c1d-456c-a839-ce5f626c210d {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.142068] env[69927]: DEBUG nova.network.neutron [-] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.179018] env[69927]: DEBUG nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 890.312687] env[69927]: DEBUG oslo_vmware.api [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4095992, 'name': PowerOnVM_Task, 'duration_secs': 0.601966} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.315367] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.315593] env[69927]: INFO nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Took 7.53 seconds to spawn the instance on the hypervisor. [ 890.315810] env[69927]: DEBUG nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 890.317164] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1031eb5a-e060-4124-8ee4-862e36045930 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.328208] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c0aa099f-f4c8-4d9d-a8c7-dac4bf7b6d3d tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lock "8442f144-2be4-4634-b151-62f049a975b6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.909s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.350558] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad76821-c617-4b00-b8d1-a5c06583d77a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.362658] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7f1686-1bce-4f0f-aee6-ad02c777b408 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.397508] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea07351-2763-40a8-9f53-ef75cf942348 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.404869] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a58603f-c6a8-4886-a23e-cff4642bcd2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.421529] env[69927]: DEBUG nova.compute.provider_tree [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 890.445296] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39155bb2-36d6-49f5-b475-6dbd74dd763d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.456284] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b542708-823a-460b-b8f5-6e975e24db75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.471033] env[69927]: DEBUG nova.network.neutron [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Successfully updated port: 703aee39-6cbb-4cc4-8043-e405ac713dda {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.493938] env[69927]: DEBUG nova.compute.manager [req-8b811a31-bab9-4f3f-93f4-68b97175087e req-e1581c86-e1e7-46aa-83c7-d681131c2de4 service nova] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Detach interface failed, port_id=0a3c7cb5-f42d-407f-8561-e9c2695bced8, reason: Instance 66ba8086-2dd4-4d02-aac3-1bbb4a404784 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 890.552528] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5290d3a1-6e54-95c9-81c1-c8a3b5cf6800, 'name': SearchDatastore_Task, 'duration_secs': 0.019005} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.553730] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f5031a8-5483-4d88-a32e-ea0139cb3903 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.559916] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 890.559916] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba287b-e3d4-ed15-05e8-c5308ffc2094" [ 890.559916] env[69927]: _type = "Task" [ 890.559916] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.572024] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba287b-e3d4-ed15-05e8-c5308ffc2094, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.578820] env[69927]: DEBUG nova.network.neutron [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Successfully created port: 4b7f5764-23c4-40f8-a618-0c608e0e987d {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.585289] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f58dc8-0516-edb9-6f61-b88fb6b95511, 'name': SearchDatastore_Task, 'duration_secs': 0.029936} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.585289] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.585636] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4/aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 890.585896] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bc895ff-3214-48eb-8928-e581b6fa1054 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.597524] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 890.597524] env[69927]: value = "task-4095993" [ 890.597524] env[69927]: _type = "Task" [ 890.597524] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.606130] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.648380] env[69927]: INFO nova.compute.manager [-] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Took 1.61 seconds to deallocate network for instance. 
[ 890.780737] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Acquiring lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.781024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.781258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Acquiring lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.781407] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.781577] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.784608] env[69927]: INFO nova.compute.manager [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Terminating instance [ 890.842647] env[69927]: INFO nova.compute.manager [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Took 45.90 seconds to build instance. 
[ 890.962976] env[69927]: ERROR nova.scheduler.client.report [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] [req-73d0c627-8f5c-4d18-ac84-eefbb155d2f0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-73d0c627-8f5c-4d18-ac84-eefbb155d2f0"}]} [ 890.973362] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "refresh_cache-07484a6c-f9d1-405b-9ae4-a1b830f474ed" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.974528] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired lock "refresh_cache-07484a6c-f9d1-405b-9ae4-a1b830f474ed" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.974528] env[69927]: DEBUG nova.network.neutron [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.988941] env[69927]: DEBUG nova.scheduler.client.report [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 891.011172] env[69927]: DEBUG nova.scheduler.client.report [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 891.011421] env[69927]: DEBUG nova.compute.provider_tree [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 
tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.034555] env[69927]: DEBUG nova.scheduler.client.report [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 891.065970] env[69927]: DEBUG nova.scheduler.client.report [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 891.081898] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba287b-e3d4-ed15-05e8-c5308ffc2094, 'name': SearchDatastore_Task, 'duration_secs': 0.019703} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.082205] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.082588] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 480a672c-cb48-45e3-86bd-1741957a5124/480a672c-cb48-45e3-86bd-1741957a5124.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 891.082956] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d449ec43-a5de-484e-8574-b18f41fc66ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.092783] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 891.092783] env[69927]: value = "task-4095994" [ 891.092783] env[69927]: _type = "Task" [ 891.092783] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.105444] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4095994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.111901] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095993, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.181418] env[69927]: DEBUG nova.network.neutron [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Updated VIF entry in instance network info cache for port 0a7e52ca-6c1d-456c-a839-ce5f626c210d. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.182454] env[69927]: DEBUG nova.network.neutron [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Updating instance_info_cache with network_info: [{"id": "0a7e52ca-6c1d-456c-a839-ce5f626c210d", "address": "fa:16:3e:5e:10:ad", "network": {"id": "183df4de-622d-41f7-941d-f44276468dcd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1089604059-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a40e3fd86af540ba921103886c84b280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a7e52ca-6c", "ovs_interfaceid": "0a7e52ca-6c1d-456c-a839-ce5f626c210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.191662] env[69927]: DEBUG nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 891.228023] env[69927]: INFO nova.compute.manager [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Took 0.58 seconds to detach 1 volumes for instance. 
[ 891.247114] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 891.247357] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.247515] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.247733] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.247893] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.248055] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 891.248316] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 891.248475] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 891.248663] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 
tempest-ServersAdminTestJSON-859816448-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 891.248823] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 891.249015] env[69927]: DEBUG nova.virt.hardware [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 891.250235] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a964ab-4b79-4cb7-8623-234cc0d4a2a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.259602] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef1313e-cf85-444e-a78a-c1e4a7181fcb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.290315] env[69927]: DEBUG nova.compute.manager [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 891.290315] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.290315] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04812f77-b3a4-4a53-8c1b-47af6ad57d08 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.301107] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 891.301107] env[69927]: value = "task-4095995" [ 891.301107] env[69927]: _type = "Task" [ 891.301107] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.311270] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095995, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.345930] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5281e053-0841-4b08-87e7-1a125d6ef202 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.506s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.532487] env[69927]: DEBUG nova.network.neutron [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.622670] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095993, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705701} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.622956] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4095994, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.629496] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4/aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.629769] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.631218] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f397fb5-11d0-47cb-a508-6120b659ba05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.639299] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 891.639299] env[69927]: value = "task-4095996" [ 891.639299] env[69927]: _type = "Task" [ 891.639299] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.659386] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095996, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.685275] env[69927]: DEBUG oslo_concurrency.lockutils [req-81ad2bbd-d77c-46df-a312-897d107b83b8 req-43f0c929-f8de-419c-8c63-c93148bbaa1e service nova] Releasing lock "refresh_cache-aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.742019] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.761469] env[69927]: DEBUG nova.network.neutron [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Updating instance_info_cache with network_info: [{"id": "703aee39-6cbb-4cc4-8043-e405ac713dda", "address": "fa:16:3e:db:16:8a", "network": {"id": "c75059d6-4484-4557-9698-a7b62151aeda", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1147445131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81b92defba9241bca7e1db3e91030712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap703aee39-6c", "ovs_interfaceid": "703aee39-6cbb-4cc4-8043-e405ac713dda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.818030] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095995, 'name': PowerOffVM_Task, 'duration_secs': 0.350165} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.818030] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.818030] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 891.818030] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811295', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'name': 'volume-fc20d8d5-9567-4a28-a803-7b54361ba124', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f67d6a0-e4b7-435e-8991-0f54e0379d22', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'serial': 'fc20d8d5-9567-4a28-a803-7b54361ba124'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 891.818030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2357e57-a67b-4f21-9bf1-280148149b5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.844266] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5307830-2c9a-4082-b8ff-5622e4b36881 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.853712] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 891.858521] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e3ee53-fe43-412c-9488-b1634d72b49c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.886239] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b42b4c7-5286-4e8b-b4b0-633a4c96f05f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.905183] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] The volume has not been displaced from its original location: [datastore1] volume-fc20d8d5-9567-4a28-a803-7b54361ba124/volume-fc20d8d5-9567-4a28-a803-7b54361ba124.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 891.911151] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Reconfiguring VM instance instance-00000012 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 891.915380] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57b84c76-c529-4124-8127-e68f52dcf2d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.935908] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 891.935908] env[69927]: value = "task-4095997" [ 891.935908] env[69927]: _type = "Task" [ 891.935908] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.947179] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095997, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.947742] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94711ea7-39a5-4517-abd2-d98cc47a5edd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.955834] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cb7d67-10f0-4644-b43d-1848970f5ba6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.994957] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb99e1f4-9789-448a-9b96-a52aab520f53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.008152] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05993815-eeeb-493b-9d5a-9b0213d7c037 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.027977] env[69927]: DEBUG nova.compute.provider_tree [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.108661] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4095994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593487} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.108960] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 480a672c-cb48-45e3-86bd-1741957a5124/480a672c-cb48-45e3-86bd-1741957a5124.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 892.109210] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 892.109475] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a73df5c6-39a8-4f11-931a-21bec323e960 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.120166] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 892.120166] env[69927]: value = "task-4095998" [ 892.120166] env[69927]: _type = "Task" [ 892.120166] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.133998] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4095998, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.153118] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095996, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.261482} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.154473] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.154702] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d82b9e-d8a8-4833-ae89-c551f17a4db3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.180554] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4/aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.181311] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3c20626-839c-4994-af37-c15510994fe0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.203420] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 892.203420] env[69927]: value = "task-4095999" [ 892.203420] env[69927]: _type = "Task" [ 892.203420] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.214111] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095999, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.262723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Releasing lock "refresh_cache-07484a6c-f9d1-405b-9ae4-a1b830f474ed" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.263117] env[69927]: DEBUG nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Instance network_info: |[{"id": "703aee39-6cbb-4cc4-8043-e405ac713dda", "address": "fa:16:3e:db:16:8a", "network": {"id": "c75059d6-4484-4557-9698-a7b62151aeda", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1147445131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81b92defba9241bca7e1db3e91030712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap703aee39-6c", "ovs_interfaceid": "703aee39-6cbb-4cc4-8043-e405ac713dda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 892.263627] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:16:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2e45023-22b5-458b-826e-9b7eb69ba028', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '703aee39-6cbb-4cc4-8043-e405ac713dda', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.271468] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.272428] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.272428] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a02ad4a-f817-4adc-ae29-86bed511ed2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.294749] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.294749] env[69927]: value = "task-4096000" [ 892.294749] env[69927]: _type = "Task" [ 892.294749] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.303680] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096000, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.395295] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.447528] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4095997, 'name': ReconfigVM_Task, 'duration_secs': 0.257431} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.448067] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Reconfigured VM instance instance-00000012 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 892.453740] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b6eb610-b67c-454e-b546-b5dfad795b86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.475268] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 892.475268] env[69927]: value = "task-4096001" [ 892.475268] env[69927]: _type = "Task" [ 892.475268] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.485702] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4096001, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.504896] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "b67630a4-2c1a-440b-af82-80c908ffa6e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.505044] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.505148] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "b67630a4-2c1a-440b-af82-80c908ffa6e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.505380] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.505639] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.508489] env[69927]: INFO nova.compute.manager [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Terminating instance [ 892.532330] env[69927]: DEBUG nova.scheduler.client.report [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.578009] env[69927]: DEBUG nova.compute.manager 
[req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Received event network-vif-plugged-703aee39-6cbb-4cc4-8043-e405ac713dda {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.578257] env[69927]: DEBUG oslo_concurrency.lockutils [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] Acquiring lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.578453] env[69927]: DEBUG oslo_concurrency.lockutils [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.578624] env[69927]: DEBUG oslo_concurrency.lockutils [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.578902] env[69927]: DEBUG nova.compute.manager [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] No waiting events found dispatching network-vif-plugged-703aee39-6cbb-4cc4-8043-e405ac713dda {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 892.578990] env[69927]: WARNING nova.compute.manager [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Received unexpected event network-vif-plugged-703aee39-6cbb-4cc4-8043-e405ac713dda for instance with vm_state building and task_state spawning. [ 892.579459] env[69927]: DEBUG nova.compute.manager [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Received event network-changed-703aee39-6cbb-4cc4-8043-e405ac713dda {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.579459] env[69927]: DEBUG nova.compute.manager [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Refreshing instance network info cache due to event network-changed-703aee39-6cbb-4cc4-8043-e405ac713dda. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 892.579459] env[69927]: DEBUG oslo_concurrency.lockutils [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] Acquiring lock "refresh_cache-07484a6c-f9d1-405b-9ae4-a1b830f474ed" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.579702] env[69927]: DEBUG oslo_concurrency.lockutils [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] Acquired lock "refresh_cache-07484a6c-f9d1-405b-9ae4-a1b830f474ed" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.579946] env[69927]: DEBUG nova.network.neutron [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Refreshing network info cache for port 703aee39-6cbb-4cc4-8043-e405ac713dda {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.630656] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4095998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.238352} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.630952] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.631751] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5373d5d3-398f-4de8-8d77-2aedf61edd79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.660233] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 480a672c-cb48-45e3-86bd-1741957a5124/480a672c-cb48-45e3-86bd-1741957a5124.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.661097] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05d6ad55-b3d1-4701-98e1-02ea1147fb94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.682458] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 892.682458] env[69927]: value = "task-4096002" [ 892.682458] env[69927]: _type = "Task" [ 892.682458] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.691356] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096002, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.714148] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.785826] env[69927]: DEBUG nova.network.neutron [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Successfully updated port: 4b7f5764-23c4-40f8-a618-0c608e0e987d {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.808561] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096000, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.874422] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Acquiring lock "8442f144-2be4-4634-b151-62f049a975b6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.874845] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lock "8442f144-2be4-4634-b151-62f049a975b6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.987356] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4096001, 'name': ReconfigVM_Task, 'duration_secs': 0.31286} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.987630] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811295', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'name': 'volume-fc20d8d5-9567-4a28-a803-7b54361ba124', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f67d6a0-e4b7-435e-8991-0f54e0379d22', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc20d8d5-9567-4a28-a803-7b54361ba124', 'serial': 'fc20d8d5-9567-4a28-a803-7b54361ba124'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 892.987932] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.988799] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92c6dcc-75aa-4a88-9ed9-ffb922882eb3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.996558] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.996816] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-898006eb-7249-4a66-a941-171b24ca0a01 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.012968] env[69927]: DEBUG nova.compute.manager [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 893.013129] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 893.014037] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6d218a-b884-402c-b70e-3dac9ff96af6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.023090] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.023373] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-450e76d1-6a93-4158-9f4f-aa8f1551ae9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.030420] env[69927]: DEBUG oslo_vmware.api [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 893.030420] env[69927]: value = "task-4096004" [ 893.030420] env[69927]: _type = "Task" [ 893.030420] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.040626] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.876s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.044852] env[69927]: DEBUG oslo_vmware.api [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096004, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.045435] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.231s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.045734] env[69927]: DEBUG nova.objects.instance [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lazy-loading 'resources' on Instance uuid c7451ca3-f1fc-469b-b9d2-7fe24cb8949e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.068617] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.068929] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.069138] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Deleting the datastore file [datastore1] 5f67d6a0-e4b7-435e-8991-0f54e0379d22 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.069373] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-292734e6-0461-4c96-92a6-5e4589eb2352 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.077135] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for the task: (returnval){ [ 893.077135] env[69927]: value = "task-4096005" [ 893.077135] env[69927]: _type = "Task" [ 893.077135] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.094632] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4096005, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.109273] env[69927]: INFO nova.scheduler.client.report [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Deleted allocations for instance 8de4160d-2282-4ed3-bdf0-349445a6eab8 [ 893.193772] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096002, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.215954] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.290157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "refresh_cache-a9a62523-50fb-44b2-bfc8-9c6664dbf050" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.290157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "refresh_cache-a9a62523-50fb-44b2-bfc8-9c6664dbf050" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.290157] env[69927]: DEBUG nova.network.neutron [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.309800] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096000, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.382866] env[69927]: INFO nova.compute.manager [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Detaching volume a42dd037-f6b1-4b77-ab7f-5045e67cd2b5 [ 893.443631] env[69927]: INFO nova.virt.block_device [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Attempting to driver detach volume a42dd037-f6b1-4b77-ab7f-5045e67cd2b5 from mountpoint /dev/sdb [ 893.443971] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 893.444198] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811440', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'name': 'volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8442f144-2be4-4634-b151-62f049a975b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'serial': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 893.445567] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94c7925-2248-45ba-b414-b507eeb6646b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.473826] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b023e0-b3f6-43b7-8b51-9c441e627acb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.483098] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b764fe7-c0a8-4b4e-b302-013985ec42d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.507629] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d188fb1-5dda-40e9-9e95-447ebcfeb672 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.524498] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] The volume has not been displaced from its original location: [datastore1] volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5/volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 893.530962] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Reconfiguring VM instance instance-0000000f to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 893.531431] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96fe19b8-7b88-4e4d-b32b-7b8b1f0e5935 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.564261] env[69927]: DEBUG oslo_vmware.api [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096004, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.567462] env[69927]: DEBUG nova.network.neutron [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Updated VIF entry in instance network info cache for port 703aee39-6cbb-4cc4-8043-e405ac713dda. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.567462] env[69927]: DEBUG nova.network.neutron [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Updating instance_info_cache with network_info: [{"id": "703aee39-6cbb-4cc4-8043-e405ac713dda", "address": "fa:16:3e:db:16:8a", "network": {"id": "c75059d6-4484-4557-9698-a7b62151aeda", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1147445131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81b92defba9241bca7e1db3e91030712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2e45023-22b5-458b-826e-9b7eb69ba028", "external-id": "nsx-vlan-transportzone-614", "segmentation_id": 614, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap703aee39-6c", "ovs_interfaceid": "703aee39-6cbb-4cc4-8043-e405ac713dda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.568375] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Waiting for the task: (returnval){ [ 893.568375] env[69927]: value = "task-4096006" [ 893.568375] env[69927]: _type = "Task" [ 893.568375] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.578940] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4096006, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.588619] env[69927]: DEBUG oslo_vmware.api [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Task: {'id': task-4096005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15221} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.589078] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.589078] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.589321] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.589450] env[69927]: INFO nova.compute.manager [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Took 2.30 seconds to destroy the instance on the hypervisor. [ 893.589651] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 893.589870] env[69927]: DEBUG nova.compute.manager [-] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 893.590182] env[69927]: DEBUG nova.network.neutron [-] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 893.617398] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99ed1abd-90ad-4b07-b829-cf33449910df tempest-ServersNegativeTestMultiTenantJSON-908142255 tempest-ServersNegativeTestMultiTenantJSON-908142255-project-member] Lock "8de4160d-2282-4ed3-bdf0-349445a6eab8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.390s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.694062] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096002, 'name': ReconfigVM_Task, 'duration_secs': 0.700799} completed successfully. 
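[editor's note] The `oslo.service.backend.eventlet.loopingcall` lines above ("Waiting for function ... _deallocate_network_with_retries to return") come from a looping-call wrapper that re-invokes a function until it signals completion. A minimal sketch of that pattern, assuming the classic oslo.service loopingcall interface; the `_deallocate` body and its retry condition are invented for illustration.

```python
from oslo_service import loopingcall

# Minimal sketch of the looping-call pattern behind the "Waiting for function
# ... to return" lines above: the wrapped function is re-run on a fixed
# interval until it raises LoopingCallDone. The _deallocate body is made up;
# only the loopingcall usage pattern is the point here.
state = {'attempts': 0}

def _deallocate():
    state['attempts'] += 1
    if state['attempts'] >= 3:  # pretend the third attempt succeeds
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate)
result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone is raised
```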
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.694362] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 480a672c-cb48-45e3-86bd-1741957a5124/480a672c-cb48-45e3-86bd-1741957a5124.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.695226] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f7d3b91-c3a6-469a-a2e0-f664f9383e09 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.704890] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 893.704890] env[69927]: value = "task-4096007" [ 893.704890] env[69927]: _type = "Task" [ 893.704890] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.724034] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096007, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.724034] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4095999, 'name': ReconfigVM_Task, 'duration_secs': 1.217281} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.724353] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Reconfigured VM instance instance-00000035 to attach disk [datastore2] aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4/aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.724683] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd969362-9ed9-4589-95d8-aa45e5a6974e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.731614] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 893.731614] env[69927]: value = "task-4096008" [ 893.731614] env[69927]: _type = "Task" [ 893.731614] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.740695] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096008, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.809078] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096000, 'name': CreateVM_Task, 'duration_secs': 1.150833} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.809078] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.809078] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.809078] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.809456] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 893.809634] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02d47524-44f0-47f4-bcf4-42c3571e7088 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.815460] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 893.815460] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521cf603-6b04-4b1a-900f-b357fc59ddb8" [ 893.815460] env[69927]: _type = "Task" [ 893.815460] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.827845] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521cf603-6b04-4b1a-900f-b357fc59ddb8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.969993] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "5581f8af-9796-48ad-a2f3-557e90d9662a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.970376] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.043883] env[69927]: DEBUG oslo_vmware.api [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096004, 'name': PowerOffVM_Task, 'duration_secs': 0.552246} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.044416] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 894.044606] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 894.047590] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bda40e27-d9e0-4ed7-a31d-bcc2805f1f55 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.078068] env[69927]: DEBUG oslo_concurrency.lockutils [req-00dd95c1-737a-4c11-89e7-253baa79a1ea req-82be914c-da84-4c0c-a71e-9ceb4c3500ca service nova] Releasing lock "refresh_cache-07484a6c-f9d1-405b-9ae4-a1b830f474ed" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.086636] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4096006, 'name': ReconfigVM_Task} progress is 99%. 
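[editor's note] The "Acquiring lock ... by ..." / "acquired ... waited 0.000s" / "released ... held N.NNNs" triples throughout this section come from oslo.concurrency's lock helpers. A small sketch of the two usual forms, using the instance UUID from the lines above as the lock name; the decorated function is a placeholder, not Nova's actual wrapper.

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = '5581f8af-9796-48ad-a2f3-557e90d9662a'  # lock name from the log above

# Decorator form: serializes all callers that share the same lock name.
@lockutils.synchronized(INSTANCE_UUID)
def _locked_do_build_and_run_instance():
    pass  # build the instance while holding the per-instance lock

# Context-manager form, equivalent for an ad-hoc critical section.
with lockutils.lock(INSTANCE_UUID):
    pass
```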
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.128045] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 894.128268] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 894.128456] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleting the datastore file [datastore1] b67630a4-2c1a-440b-af82-80c908ffa6e9 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 894.128747] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b41e2e9-24e8-4e48-ae96-103b2ab6f2d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.138055] env[69927]: DEBUG oslo_vmware.api [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 894.138055] env[69927]: value = "task-4096010" [ 894.138055] env[69927]: _type = "Task" [ 894.138055] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.148362] env[69927]: DEBUG oslo_vmware.api [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.159022] env[69927]: DEBUG nova.network.neutron [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.190210] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fa5813-1fe6-4e4d-a12d-c648836803d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.199663] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb96459-c555-406b-9d91-7e2840c2a2cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.246229] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2b2626-56f2-4231-9ea9-d3c2c9bcec47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.252389] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096007, 'name': Rename_Task, 'duration_secs': 0.262284} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.253294] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.253544] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bb60441-ca55-4831-b02c-e751b541267f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.261320] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096008, 'name': Rename_Task, 'duration_secs': 0.455738} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.262776] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.264096] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90068197-7fe8-4363-9c08-3d24c81884bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.268656] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 894.268656] env[69927]: value = "task-4096011" [ 894.268656] env[69927]: _type = "Task" [ 894.268656] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.268858] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1799a83f-3194-48e7-82fe-4866bd7ab1ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.285572] env[69927]: DEBUG nova.compute.provider_tree [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.293772] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096011, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.295410] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 894.295410] env[69927]: value = "task-4096012" [ 894.295410] env[69927]: _type = "Task" [ 894.295410] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.305881] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.329974] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521cf603-6b04-4b1a-900f-b357fc59ddb8, 'name': SearchDatastore_Task, 'duration_secs': 0.037773} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.334024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.334024] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.334024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.334024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.334024] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 894.334024] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b954101d-a9ca-46bb-b2d0-597eeb7743fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.346964] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 894.347188] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 894.347961] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e7161dc-2adb-435a-9048-fefd1c654946 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.353853] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 894.353853] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52392f5a-3820-7299-5606-6737238eace0" [ 894.353853] env[69927]: _type = "Task" [ 894.353853] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.362356] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52392f5a-3820-7299-5606-6737238eace0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.562300] env[69927]: DEBUG nova.network.neutron [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Updating instance_info_cache with network_info: [{"id": "4b7f5764-23c4-40f8-a618-0c608e0e987d", "address": "fa:16:3e:47:aa:b6", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b7f5764-23", "ovs_interfaceid": "4b7f5764-23c4-40f8-a618-0c608e0e987d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.585040] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4096006, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.649648] env[69927]: DEBUG oslo_vmware.api [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16831} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.650027] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.650230] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.650411] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.650588] env[69927]: INFO nova.compute.manager [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Took 1.64 seconds to destroy the instance on the hypervisor. [ 894.650838] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.651464] env[69927]: DEBUG nova.compute.manager [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 894.651464] env[69927]: DEBUG nova.network.neutron [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.781655] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096011, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.789782] env[69927]: DEBUG nova.scheduler.client.report [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.806212] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096012, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.868843] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52392f5a-3820-7299-5606-6737238eace0, 'name': SearchDatastore_Task, 'duration_secs': 0.043838} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.869729] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70a8aa00-b037-40a5-acd2-4eab92dffa61 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.875680] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 894.875680] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a9f02a-7806-8142-395e-c4bb0632639f" [ 894.875680] env[69927]: _type = "Task" [ 894.875680] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.885238] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a9f02a-7806-8142-395e-c4bb0632639f, 'name': SearchDatastore_Task} progress is 0%. 
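[editor's note] The inventory payload reported for provider 2f529b36-... above carries three resource classes; Placement's usable capacity for each is conventionally (total - reserved) * allocation_ratio. A quick arithmetic check of the figures in the log (illustrative script, not scheduler or Placement code):

```python
# Inventory data copied from the report log line above, with the usual
# placement capacity rule capacity = (total - reserved) * allocation_ratio
# applied to each resource class. Illustrative arithmetic only.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g}")
# VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 400
```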
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.065782] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "refresh_cache-a9a62523-50fb-44b2-bfc8-9c6664dbf050" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.066177] env[69927]: DEBUG nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Instance network_info: |[{"id": "4b7f5764-23c4-40f8-a618-0c608e0e987d", "address": "fa:16:3e:47:aa:b6", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b7f5764-23", "ovs_interfaceid": "4b7f5764-23c4-40f8-a618-0c608e0e987d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 895.066660] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:aa:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c8a5d7c-ee1f-4a41-94e4-db31e85a398d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b7f5764-23c4-40f8-a618-0c608e0e987d', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.074529] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
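[editor's note] Comparing the "Updating instance_info_cache with network_info" entry with the "Instance VIF info" line above shows how a Neutron port record is reduced to the VIF-info dict the VMware driver builds the VM with. The sketch below mirrors that field mapping as inferred from the log; it is not Nova's actual nova.virt.vmwareapi.vif code.

```python
# Rough sketch of the network_info -> VMware VIF-info transformation visible
# in the log above. Field mapping is inferred from the logged values; this is
# an illustration, not Nova's vif module.
def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    return {
        'network_name': vif['network']['bridge'],                  # "br-int"
        'mac_address': vif['address'],                             # "fa:16:3e:47:aa:b6"
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'], # NSX logical switch
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],                                     # Neutron port UUID
        'vif_model': vif_model,
    }
```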
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 895.074876] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.078771] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-491c7328-6ff5-43a1-9f1a-6446fd328209 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.107091] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4096006, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.108846] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.108846] env[69927]: value = "task-4096013" [ 895.108846] env[69927]: _type = "Task" [ 895.108846] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.122502] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096013, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.195310] env[69927]: DEBUG nova.compute.manager [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Received event network-vif-plugged-4b7f5764-23c4-40f8-a618-0c608e0e987d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 895.195310] env[69927]: DEBUG oslo_concurrency.lockutils [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] Acquiring lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.195310] env[69927]: DEBUG oslo_concurrency.lockutils [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.195310] env[69927]: DEBUG oslo_concurrency.lockutils [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.195310] env[69927]: DEBUG nova.compute.manager [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] No waiting events found dispatching network-vif-plugged-4b7f5764-23c4-40f8-a618-0c608e0e987d {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 895.195310] env[69927]: WARNING nova.compute.manager [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 
req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Received unexpected event network-vif-plugged-4b7f5764-23c4-40f8-a618-0c608e0e987d for instance with vm_state building and task_state spawning. [ 895.195310] env[69927]: DEBUG nova.compute.manager [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Received event network-changed-4b7f5764-23c4-40f8-a618-0c608e0e987d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 895.195310] env[69927]: DEBUG nova.compute.manager [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Refreshing instance network info cache due to event network-changed-4b7f5764-23c4-40f8-a618-0c608e0e987d. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 895.195310] env[69927]: DEBUG oslo_concurrency.lockutils [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] Acquiring lock "refresh_cache-a9a62523-50fb-44b2-bfc8-9c6664dbf050" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.195310] env[69927]: DEBUG oslo_concurrency.lockutils [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] Acquired lock "refresh_cache-a9a62523-50fb-44b2-bfc8-9c6664dbf050" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.195310] env[69927]: DEBUG nova.network.neutron [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Refreshing network info cache for port 4b7f5764-23c4-40f8-a618-0c608e0e987d {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.284865] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096011, 'name': PowerOnVM_Task} progress is 66%. 
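[editor's note] The external-event lines above expose two naming conventions: the per-instance event lock is "<instance_uuid>-events" and the event tag dispatched to the instance is "<event_name>-<port_id>". A trivial illustration with values copied from the log; the variables are only for demonstration.

```python
# Naming conventions visible in the event-handling lines above (values copied
# from the log; the snippet itself is illustrative only).
instance_uuid = 'a9a62523-50fb-44b2-bfc8-9c6664dbf050'
port_id = '4b7f5764-23c4-40f8-a618-0c608e0e987d'

events_lock = f'{instance_uuid}-events'          # lock taken around _pop_event
event_tag = f'network-vif-plugged-{port_id}'     # key the event is dispatched under

assert events_lock == 'a9a62523-50fb-44b2-bfc8-9c6664dbf050-events'
assert event_tag == 'network-vif-plugged-4b7f5764-23c4-40f8-a618-0c608e0e987d'
```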
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.298667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.250s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.299923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.400s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.301108] env[69927]: DEBUG nova.objects.instance [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lazy-loading 'resources' on Instance uuid 7ce79e41-333a-4ef3-ba68-f74067d4ac5a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.322345] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096012, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.397282] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a9f02a-7806-8142-395e-c4bb0632639f, 'name': SearchDatastore_Task, 'duration_secs': 0.047452} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.397282] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.397282] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 07484a6c-f9d1-405b-9ae4-a1b830f474ed/07484a6c-f9d1-405b-9ae4-a1b830f474ed.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 895.397282] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8521bd7-6978-4de3-9012-2042408a7c11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.408949] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 895.408949] env[69927]: value = "task-4096014" [ 895.408949] env[69927]: _type = "Task" [ 895.408949] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.421521] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.444196] env[69927]: DEBUG nova.network.neutron [-] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.545200] env[69927]: INFO nova.scheduler.client.report [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Deleted allocations for instance c7451ca3-f1fc-469b-b9d2-7fe24cb8949e [ 895.593921] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4096006, 'name': ReconfigVM_Task, 'duration_secs': 1.607917} completed successfully. 
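[editor's note] The CopyVirtualDisk step above moves the cached image vmdk into the instance directory; both datastore paths follow the same "[datastore] folder/.../file" form. The helper below rebuilds the two paths seen in the log (illustrative; not Nova's ds_util).

```python
# Rebuild the source/destination datastore paths from the CopyVirtualDisk step
# above. Path layout is taken from the log; ds_path is an illustrative helper,
# not nova.virt.vmwareapi.ds_util.
def ds_path(datastore, *parts):
    return f'[{datastore}] ' + '/'.join(parts)

image_id = 'f524494e-9179-4b3e-a3e2-782f019def24'
instance_uuid = '07484a6c-f9d1-405b-9ae4-a1b830f474ed'

src = ds_path('datastore2', 'devstack-image-cache_base', image_id, f'{image_id}.vmdk')
dst = ds_path('datastore2', instance_uuid, f'{instance_uuid}.vmdk')
# src: the cached image vmdk under devstack-image-cache_base
# dst: the instance's own directory on the same datastore
```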
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.594423] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Reconfigured VM instance instance-0000000f to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 895.601120] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1449c335-daca-44ab-89ca-00cfea7892c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.628859] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096013, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.634334] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Waiting for the task: (returnval){ [ 895.634334] env[69927]: value = "task-4096015" [ 895.634334] env[69927]: _type = "Task" [ 895.634334] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.642471] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4096015, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.703680] env[69927]: DEBUG nova.network.neutron [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.786701] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096011, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.817294] env[69927]: DEBUG oslo_vmware.api [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096012, 'name': PowerOnVM_Task, 'duration_secs': 1.125249} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.817294] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 895.817294] env[69927]: INFO nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Took 10.32 seconds to spawn the instance on the hypervisor. 
[ 895.817294] env[69927]: DEBUG nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 895.819611] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd609f7e-0d77-47bc-b19e-87cede9d4e33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.921302] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.951983] env[69927]: INFO nova.compute.manager [-] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Took 2.36 seconds to deallocate network for instance. [ 896.055220] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6075f799-2836-4c3b-8ddc-523b91e28aa6 tempest-TenantUsagesTestJSON-1584323566 tempest-TenantUsagesTestJSON-1584323566-project-member] Lock "c7451ca3-f1fc-469b-b9d2-7fe24cb8949e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.015s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.139053] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096013, 'name': CreateVM_Task, 'duration_secs': 0.833956} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.144262] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.145317] env[69927]: DEBUG nova.network.neutron [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Updated VIF entry in instance network info cache for port 4b7f5764-23c4-40f8-a618-0c608e0e987d. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.145718] env[69927]: DEBUG nova.network.neutron [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Updating instance_info_cache with network_info: [{"id": "4b7f5764-23c4-40f8-a618-0c608e0e987d", "address": "fa:16:3e:47:aa:b6", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b7f5764-23", "ovs_interfaceid": "4b7f5764-23c4-40f8-a618-0c608e0e987d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.148301] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.148484] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.151956] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 896.151956] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59c7305a-02ea-46e8-a3e4-155684eb244b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.158996] env[69927]: DEBUG oslo_vmware.api [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Task: {'id': task-4096015, 'name': ReconfigVM_Task, 'duration_secs': 0.293395} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.160104] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811440', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'name': 'volume-a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8442f144-2be4-4634-b151-62f049a975b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5', 'serial': 'a42dd037-f6b1-4b77-ab7f-5045e67cd2b5'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 896.168532] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 896.168532] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52abf328-c495-37fe-16a2-1072aa3cd472" [ 896.168532] env[69927]: _type = "Task" [ 896.168532] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.187616] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52abf328-c495-37fe-16a2-1072aa3cd472, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.206597] env[69927]: INFO nova.compute.manager [-] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Took 1.56 seconds to deallocate network for instance. [ 896.288289] env[69927]: DEBUG oslo_vmware.api [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096011, 'name': PowerOnVM_Task, 'duration_secs': 1.914116} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.288587] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.288796] env[69927]: INFO nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Took 18.47 seconds to spawn the instance on the hypervisor. 
[ 896.288979] env[69927]: DEBUG nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 896.290307] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638e1680-cf91-4b1a-ae03-4a86a1fd89c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.353045] env[69927]: INFO nova.compute.manager [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Took 47.99 seconds to build instance. [ 896.421358] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096014, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.538192] env[69927]: INFO nova.compute.manager [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Took 0.59 seconds to detach 1 volumes for instance. [ 896.542031] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34ea10a-d79b-4334-9021-99b17aabbc97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.545646] env[69927]: DEBUG nova.compute.manager [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Deleting volume: fc20d8d5-9567-4a28-a803-7b54361ba124 {{(pid=69927) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 896.554852] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c408e652-e115-4604-919e-ffadeaa2e83d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.592673] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195c15e1-4c5d-4533-bbaf-8d5279574df7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.602481] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9071a77a-9839-4085-8cb0-3a67c8cfada5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.622944] env[69927]: DEBUG nova.compute.provider_tree [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.664390] env[69927]: DEBUG oslo_concurrency.lockutils [req-8fb0e9d1-de9c-4969-8293-5caa1293d8c7 
req-9d2ad7a6-a708-4dc0-b258-e3efb0fd4e66 service nova] Releasing lock "refresh_cache-a9a62523-50fb-44b2-bfc8-9c6664dbf050" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.688352] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52abf328-c495-37fe-16a2-1072aa3cd472, 'name': SearchDatastore_Task, 'duration_secs': 0.034287} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.688690] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.688953] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.689222] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.689410] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.689625] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.689926] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dff4060-c018-49f5-9c54-652fa4ad9e79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.707231] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.707231] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.707533] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20d165cb-47b7-4ded-b4b5-7172d1dd1922 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.716294] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 896.716294] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52272c2c-0e0a-ab02-911b-a6c7c81b8393" [ 896.716294] env[69927]: _type = "Task" [ 896.716294] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.717073] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.730748] env[69927]: DEBUG nova.objects.instance [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lazy-loading 'flavor' on Instance uuid 8442f144-2be4-4634-b151-62f049a975b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.732772] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52272c2c-0e0a-ab02-911b-a6c7c81b8393, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.817505] env[69927]: INFO nova.compute.manager [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Took 58.29 seconds to build instance. [ 896.856550] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56cbae79-9c23-4f9c-b322-2d5669c60ebb tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.783s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.920619] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096014, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.374105} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.920904] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 07484a6c-f9d1-405b-9ae4-a1b830f474ed/07484a6c-f9d1-405b-9ae4-a1b830f474ed.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.921131] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.921473] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8c354f0-acfb-41bc-991c-a2ef1e6ddad6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.929420] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 896.929420] env[69927]: value = "task-4096017" [ 896.929420] env[69927]: _type = "Task" [ 896.929420] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.939704] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096017, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.953144] env[69927]: INFO nova.compute.manager [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Rebuilding instance [ 897.065148] env[69927]: DEBUG nova.compute.manager [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 897.066123] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e380df9-0bc3-46df-a406-29d1ef9a3b94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.132398] env[69927]: DEBUG nova.scheduler.client.report [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 897.133176] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.243431] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52272c2c-0e0a-ab02-911b-a6c7c81b8393, 'name': SearchDatastore_Task, 'duration_secs': 0.057545} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.244936] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aff4f24-f937-4131-9382-cd1d38ef007e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.251214] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 897.251214] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e8e5b2-ebd7-3ae0-c20c-4711c7870211" [ 897.251214] env[69927]: _type = "Task" [ 897.251214] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.264040] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e8e5b2-ebd7-3ae0-c20c-4711c7870211, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.322373] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6684c6d-dca5-4b3b-9fd6-2d7f5d8de85b tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "480a672c-cb48-45e3-86bd-1741957a5124" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.171s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.363752] env[69927]: DEBUG nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 897.440749] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096017, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.296435} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.441359] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.442159] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf87e53f-572f-45c3-9855-c7042b3c9c29 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.470381] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 07484a6c-f9d1-405b-9ae4-a1b830f474ed/07484a6c-f9d1-405b-9ae4-a1b830f474ed.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.472218] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c29000f9-c98c-4961-a4eb-58d26e965526 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.504949] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 897.504949] env[69927]: value = "task-4096018" [ 897.504949] env[69927]: _type = 
"Task" [ 897.504949] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.520088] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096018, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.615733] env[69927]: DEBUG nova.compute.manager [req-a2462fef-9dbc-4fdb-9ed8-ba2f61bb3788 req-79f6d223-f59e-469f-be69-f85d2e9ad0ae service nova] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Received event network-vif-deleted-043de9ef-5509-4243-acb7-a19660d6d813 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.616158] env[69927]: DEBUG nova.compute.manager [req-a2462fef-9dbc-4fdb-9ed8-ba2f61bb3788 req-79f6d223-f59e-469f-be69-f85d2e9ad0ae service nova] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Received event network-vif-deleted-b590e270-89a9-47f6-8e4e-69b428b381a8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.636648] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.638513] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.309s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.640136] env[69927]: INFO nova.compute.claims [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.749365] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1768789e-1e13-4268-a149-5e982001cff0 tempest-VolumesAssistedSnapshotsTest-1477467308 tempest-VolumesAssistedSnapshotsTest-1477467308-project-admin] Lock "8442f144-2be4-4634-b151-62f049a975b6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.871s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.766776] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e8e5b2-ebd7-3ae0-c20c-4711c7870211, 'name': SearchDatastore_Task, 'duration_secs': 0.028805} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.770021] env[69927]: INFO nova.scheduler.client.report [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Deleted allocations for instance 7ce79e41-333a-4ef3-ba68-f74067d4ac5a [ 897.770021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.772735] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] a9a62523-50fb-44b2-bfc8-9c6664dbf050/a9a62523-50fb-44b2-bfc8-9c6664dbf050.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.775700] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30e786b4-d0ea-41bf-b1db-73790dbc3fc7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.785243] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 897.785243] env[69927]: value = "task-4096019" [ 897.785243] env[69927]: _type = "Task" [ 897.785243] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.795595] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.826016] env[69927]: DEBUG nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 897.890719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.003532] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "480a672c-cb48-45e3-86bd-1741957a5124" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.004056] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "480a672c-cb48-45e3-86bd-1741957a5124" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.004349] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "480a672c-cb48-45e3-86bd-1741957a5124-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.004422] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.004582] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "480a672c-cb48-45e3-86bd-1741957a5124-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.010834] env[69927]: INFO nova.compute.manager [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Terminating instance [ 898.018952] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096018, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.087431] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.087431] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62553585-0e21-4a8f-8868-34b95cbecd2a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.102363] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 898.102363] env[69927]: value = "task-4096020" [ 898.102363] env[69927]: _type = "Task" [ 898.102363] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.112252] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.289609] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c542397f-7835-4329-a377-f9575d7e5abd tempest-ServersTestJSON-2147431032 tempest-ServersTestJSON-2147431032-project-member] Lock "7ce79e41-333a-4ef3-ba68-f74067d4ac5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.242s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.303925] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.371035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.518254] env[69927]: DEBUG nova.compute.manager [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 898.518493] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.518786] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.519567] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae84aba8-f8b2-4f47-b3a9-eebeab3b3ed8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.527664] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.528022] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8473f12-9600-437c-a052-64e1452c68ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.536230] env[69927]: DEBUG oslo_vmware.api [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 898.536230] env[69927]: value = "task-4096022" [ 898.536230] env[69927]: _type = "Task" [ 898.536230] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.545568] env[69927]: DEBUG oslo_vmware.api [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.619613] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096020, 'name': PowerOffVM_Task, 'duration_secs': 0.218735} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.620012] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 898.620913] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.622052] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff4976b-f406-47a3-8409-b133dc48de10 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.631871] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 898.632898] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db4cb73e-a69c-4a01-b8c9-fb1bcf9492ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.662723] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 898.662869] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 898.663127] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Deleting the datastore file [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.663364] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e10f85fa-ec8a-4e02-b56d-eef59f08abe1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.672724] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 898.672724] env[69927]: value = "task-4096024" [ 898.672724] env[69927]: _type = "Task" [ 898.672724] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.685914] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096024, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.804418] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096019, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.021526] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096018, 'name': ReconfigVM_Task, 'duration_secs': 1.371562} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.022610] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 07484a6c-f9d1-405b-9ae4-a1b830f474ed/07484a6c-f9d1-405b-9ae4-a1b830f474ed.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.022610] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-018f8730-47e1-42fe-9886-9f7f55138835 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.034459] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 899.034459] env[69927]: value = "task-4096025" [ 899.034459] env[69927]: _type = "Task" [ 899.034459] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.057787] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096025, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.063863] env[69927]: DEBUG oslo_vmware.api [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096022, 'name': PowerOffVM_Task, 'duration_secs': 0.305215} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.064487] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.064662] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.064928] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83711a1a-d447-406d-907c-340998346d81 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.194668] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096024, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299343} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.194957] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.195166] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.195774] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.250309] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.252483] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.252883] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Deleting the datastore file [datastore1] 480a672c-cb48-45e3-86bd-1741957a5124 
{{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.254128] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7dabcd6-e019-4d25-8112-1c447e3631c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.269426] env[69927]: DEBUG oslo_vmware.api [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 899.269426] env[69927]: value = "task-4096027" [ 899.269426] env[69927]: _type = "Task" [ 899.269426] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.291449] env[69927]: DEBUG oslo_vmware.api [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.305877] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096019, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.221789} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.307487] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] a9a62523-50fb-44b2-bfc8-9c6664dbf050/a9a62523-50fb-44b2-bfc8-9c6664dbf050.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.307487] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.307715] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13f2ee75-78e5-4daa-8b20-65ece387d613 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.319728] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 899.319728] env[69927]: value = "task-4096028" [ 899.319728] env[69927]: _type = "Task" [ 899.319728] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.332434] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.427177] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4106e6e6-062e-4231-a248-1b03a4d9fa9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.440866] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657723d1-ae99-4b16-8f92-ef252ce1a985 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.502171] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e30385e-fd82-475c-a80a-1aa263f8b324 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.513522] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7747a7c0-d34e-400c-b1e0-8d37d434ee36 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.539342] env[69927]: DEBUG nova.compute.provider_tree [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.553121] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096025, 'name': Rename_Task, 'duration_secs': 0.282884} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.554278] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.554551] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d0e4eb0-8108-47b2-add7-093f1ba686b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.562708] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 899.562708] env[69927]: value = "task-4096029" [ 899.562708] env[69927]: _type = "Task" [ 899.562708] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.573210] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096029, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.780722] env[69927]: DEBUG oslo_vmware.api [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221161} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.781133] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.781408] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.781673] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.781993] env[69927]: INFO nova.compute.manager [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Took 1.26 seconds to destroy the instance on the hypervisor. [ 899.782292] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.782967] env[69927]: DEBUG nova.compute.manager [-] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 899.783670] env[69927]: DEBUG nova.network.neutron [-] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 899.831184] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075457} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.831407] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.832088] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43e034c-19aa-46cc-8b26-87979a71518f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.856118] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] a9a62523-50fb-44b2-bfc8-9c6664dbf050/a9a62523-50fb-44b2-bfc8-9c6664dbf050.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.856562] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f7b6366-663f-4b22-a1d7-06f185e387ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.880590] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 899.880590] env[69927]: value = "task-4096030" [ 899.880590] env[69927]: _type = "Task" [ 899.880590] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.889930] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096030, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.900446] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.901017] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.901017] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "f6972b90-7746-4a37-8be8-1739f96dc3dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.901163] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.901411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.906788] env[69927]: INFO nova.compute.manager [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Terminating instance [ 900.050198] env[69927]: DEBUG nova.scheduler.client.report [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.073869] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 
tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096029, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.245390] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 900.245390] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 900.245390] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 900.245390] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 900.245390] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 900.245390] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 900.245390] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 900.247429] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 900.247798] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 900.248105] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 900.248518] env[69927]: DEBUG nova.virt.hardware [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 900.250572] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00860b6b-1db7-435d-8d8a-ca63ae4d3ea7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.268818] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0de9574-22f2-4845-bdbf-4dffb5703129 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.285728] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 900.292513] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 900.293202] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 900.293703] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-056e2ef8-ac0f-448e-b8a9-4c8fadadee86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.313466] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 900.313466] env[69927]: value = "task-4096031" [ 900.313466] env[69927]: _type = "Task" [ 900.313466] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.326911] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096031, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.390854] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.409942] env[69927]: DEBUG nova.compute.manager [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 900.412681] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.412681] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf1d6d2-ff3a-4489-9e7e-0e8a02460c92 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.422091] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.422398] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-157e3875-88c0-4354-be4e-682bdf33a71a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.435683] env[69927]: DEBUG oslo_vmware.api [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 900.435683] env[69927]: value = "task-4096032" [ 900.435683] env[69927]: _type = "Task" [ 900.435683] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.455062] env[69927]: DEBUG oslo_vmware.api [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096032, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.557372] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.919s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.558033] env[69927]: DEBUG nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 900.561293] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.133s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.561484] env[69927]: DEBUG nova.objects.instance [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lazy-loading 'resources' on Instance uuid a536b069-45e0-4ffe-be53-ac33f8cb6ec0 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.581340] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096029, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.824764] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096031, 'name': CreateVM_Task, 'duration_secs': 0.424079} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.824937] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.825396] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.825588] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.825903] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 900.826169] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe03be2c-cfca-496f-a553-da53b5c58421 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.832095] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 900.832095] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7b862-af5f-2df6-cad6-f563828710c2" [ 900.832095] env[69927]: _type = "Task" [ 900.832095] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.843023] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7b862-af5f-2df6-cad6-f563828710c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.896352] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096030, 'name': ReconfigVM_Task, 'duration_secs': 0.753633} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.896684] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Reconfigured VM instance instance-00000037 to attach disk [datastore2] a9a62523-50fb-44b2-bfc8-9c6664dbf050/a9a62523-50fb-44b2-bfc8-9c6664dbf050.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.897466] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed7bf431-db8f-49d4-ba0d-8f94ba110bac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.906672] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 900.906672] env[69927]: value = "task-4096033" [ 900.906672] env[69927]: _type = "Task" [ 900.906672] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.917528] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096033, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.949350] env[69927]: DEBUG oslo_vmware.api [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096032, 'name': PowerOffVM_Task, 'duration_secs': 0.325088} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.949540] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.949714] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.949975] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4cc6491-808e-4b6e-825c-b91f9a9cc76d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.994208] env[69927]: DEBUG nova.compute.manager [req-81cc1c0b-d60e-43c2-85bc-05cbbdf15ba5 req-e096244c-5a8b-46d2-aeee-ff68930db751 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-vif-deleted-b0d2f460-225f-47fe-96d1-bdefd018eac5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 900.994433] env[69927]: INFO nova.compute.manager [req-81cc1c0b-d60e-43c2-85bc-05cbbdf15ba5 req-e096244c-5a8b-46d2-aeee-ff68930db751 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Neutron deleted interface b0d2f460-225f-47fe-96d1-bdefd018eac5; detaching it from the instance and deleting it from the info cache [ 900.994776] env[69927]: DEBUG nova.network.neutron [req-81cc1c0b-d60e-43c2-85bc-05cbbdf15ba5 req-e096244c-5a8b-46d2-aeee-ff68930db751 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Updating instance_info_cache with network_info: [{"id": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "address": "fa:16:3e:2a:51:b2", "network": {"id": "214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa4af0b2-0d", "ovs_interfaceid": "fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e789eded-c761-41d1-90f9-0541da7d7635", "address": "fa:16:3e:41:c2:a4", "network": {"id": "214a9cc8-61b1-414a-a20b-99fd5eff1bad", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1135975108", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape789eded-c7", "ovs_interfaceid": "e789eded-c761-41d1-90f9-0541da7d7635", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.024392] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.024658] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.024849] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Deleting the datastore file [datastore1] f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.025305] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7fac25b-e9e6-4cd6-a6c5-70bdcba25861 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.035332] env[69927]: DEBUG oslo_vmware.api [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 901.035332] env[69927]: value = "task-4096035" [ 901.035332] env[69927]: _type = "Task" [ 901.035332] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.045580] env[69927]: DEBUG oslo_vmware.api [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096035, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.070260] env[69927]: DEBUG nova.compute.utils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.070260] env[69927]: DEBUG nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.070260] env[69927]: DEBUG nova.network.neutron [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 901.083666] env[69927]: DEBUG oslo_vmware.api [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096029, 'name': PowerOnVM_Task, 'duration_secs': 1.152764} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.084167] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 901.084350] env[69927]: INFO nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Took 12.76 seconds to spawn the instance on the hypervisor. [ 901.084537] env[69927]: DEBUG nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 901.085928] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d9214c-f43a-4e31-9cd3-b3caa0efc54e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.352478] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7b862-af5f-2df6-cad6-f563828710c2, 'name': SearchDatastore_Task, 'duration_secs': 0.016812} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.352478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.352926] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.353541] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.353541] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.354110] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.354593] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5b0dc6a-e174-4955-bd05-8ad04e991994 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.365980] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.366312] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.367185] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c66a9752-1b8d-4cc3-9a8a-f106af514eae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.373985] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 901.373985] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52724a44-0db6-1380-a593-194918d3fd8f" [ 901.373985] env[69927]: _type = "Task" [ 901.373985] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.387097] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52724a44-0db6-1380-a593-194918d3fd8f, 'name': SearchDatastore_Task, 'duration_secs': 0.010263} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.388091] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99c55fbf-dc9d-4e0f-a899-f092ffa5b6e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.394665] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 901.394665] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52410c74-1671-c23a-c202-e4786d90611b" [ 901.394665] env[69927]: _type = "Task" [ 901.394665] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.404825] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52410c74-1671-c23a-c202-e4786d90611b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.418033] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096033, 'name': Rename_Task, 'duration_secs': 0.18124} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.419604] env[69927]: DEBUG nova.policy [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea8c48f887ee4cfbb11f427dd66a6f1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72b796c4097e41cf947cacae04cb3a20', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 901.421979] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 901.424734] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b9a206b-b696-4f83-a89d-4bc4954278e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.437747] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 901.437747] env[69927]: value = "task-4096036" [ 901.437747] env[69927]: _type = "Task" [ 901.437747] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.450159] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.501076] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd6a4488-0f5a-44de-b69c-84ea68d365c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.511124] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906a181c-298e-4dec-b1e1-56de21dc7876 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.555306] env[69927]: DEBUG nova.compute.manager [req-81cc1c0b-d60e-43c2-85bc-05cbbdf15ba5 req-e096244c-5a8b-46d2-aeee-ff68930db751 service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Detach interface failed, port_id=b0d2f460-225f-47fe-96d1-bdefd018eac5, reason: Instance 480a672c-cb48-45e3-86bd-1741957a5124 could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 901.564713] env[69927]: DEBUG oslo_vmware.api [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165472} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.565253] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.565253] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.565551] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.565551] env[69927]: INFO nova.compute.manager [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Took 1.16 seconds to destroy the instance on the hypervisor. [ 901.565772] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 901.566665] env[69927]: DEBUG nova.compute.manager [-] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 901.566665] env[69927]: DEBUG nova.network.neutron [-] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.574017] env[69927]: DEBUG nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 901.608368] env[69927]: INFO nova.compute.manager [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Took 48.53 seconds to build instance. 
[ 901.766903] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcd9679-6ed3-4df6-a630-d9a629a80249 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.775522] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd37769-f13c-4e81-b847-255023d2ca3b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.810850] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48069c4c-6220-46b8-b627-f13a498cd1bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.819780] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475ae9ef-8f60-4e6a-81d3-98069750f3d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.835344] env[69927]: DEBUG nova.compute.provider_tree [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.907629] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52410c74-1671-c23a-c202-e4786d90611b, 'name': SearchDatastore_Task, 'duration_secs': 0.011311} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.907918] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.908207] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.908765] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb8efe8c-9d73-4166-a0a5-970eb2808016 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.918916] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 901.918916] env[69927]: value = "task-4096037" [ 901.918916] env[69927]: _type = "Task" [ 901.918916] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.931543] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096037, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.947484] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096036, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.110597] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fab1d314-b435-4f4b-b80a-e85414f29af8 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.221s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.338548] env[69927]: DEBUG nova.scheduler.client.report [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.433830] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096037, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.449122] env[69927]: DEBUG oslo_vmware.api [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096036, 'name': PowerOnVM_Task, 'duration_secs': 0.934142} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.449122] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.449122] env[69927]: INFO nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Took 11.26 seconds to spawn the instance on the hypervisor. 
[ 902.449564] env[69927]: DEBUG nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 902.450593] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6311211-3c67-49f1-8ad8-e599982ab68b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.585365] env[69927]: DEBUG nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 902.615607] env[69927]: DEBUG nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 902.625292] env[69927]: DEBUG nova.network.neutron [-] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.625292] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Flavor pref 0:0:0 
{{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 902.628894] env[69927]: DEBUG nova.virt.hardware [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 902.629533] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024aebf3-366a-48a8-bd92-fc6804976dc4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.643163] env[69927]: DEBUG nova.compute.manager [req-c6d606f5-593b-4f05-ab05-972a522ced92 req-646f4f8c-7f00-4831-b94b-c163f8887648 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Received event network-vif-deleted-2d989e8c-d768-494a-a866-4da8ff809d05 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 902.643163] env[69927]: INFO nova.compute.manager [req-c6d606f5-593b-4f05-ab05-972a522ced92 req-646f4f8c-7f00-4831-b94b-c163f8887648 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Neutron deleted interface 2d989e8c-d768-494a-a866-4da8ff809d05; detaching it from the instance and deleting it from the info cache 
[ 902.643163] env[69927]: DEBUG nova.network.neutron [req-c6d606f5-593b-4f05-ab05-972a522ced92 req-646f4f8c-7f00-4831-b94b-c163f8887648 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.652570] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67efd1cd-3260-4146-a085-b4c12fe3cac5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.700817] env[69927]: DEBUG nova.network.neutron [-] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.716022] env[69927]: DEBUG nova.network.neutron [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Successfully created port: afbb315a-bc71-453c-9b30-ef008a6e400e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.769666] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "8442f144-2be4-4634-b151-62f049a975b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.770099] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "8442f144-2be4-4634-b151-62f049a975b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.770368] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "8442f144-2be4-4634-b151-62f049a975b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.770603] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "8442f144-2be4-4634-b151-62f049a975b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.771010] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "8442f144-2be4-4634-b151-62f049a975b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.775074] env[69927]: INFO nova.compute.manager [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Terminating instance [ 902.845648] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.284s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.853351] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.042s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.854121] env[69927]: INFO nova.compute.claims [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.886230] env[69927]: INFO nova.scheduler.client.report [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Deleted allocations for instance a536b069-45e0-4ffe-be53-ac33f8cb6ec0 [ 902.934351] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096037, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52764} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.935503] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.935724] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.936296] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9114e023-fae7-4caa-a9cd-4b200a2fb5c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.945657] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 902.945657] env[69927]: value = "task-4096038" [ 902.945657] env[69927]: _type = "Task" [ 902.945657] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.955367] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096038, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.975047] env[69927]: INFO nova.compute.manager [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Took 44.94 seconds to build instance. [ 903.136561] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.139924] env[69927]: INFO nova.compute.manager [-] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Took 3.36 seconds to deallocate network for instance. 
[ 903.154284] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89e89a36-db26-4b46-85d4-dfa5221628fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.167469] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55e4797-f5e5-4fa9-91ce-d84b271c0385 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.227031] env[69927]: INFO nova.compute.manager [-] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Took 1.66 seconds to deallocate network for instance. [ 903.227031] env[69927]: DEBUG nova.compute.manager [req-c6d606f5-593b-4f05-ab05-972a522ced92 req-646f4f8c-7f00-4831-b94b-c163f8887648 service nova] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Detach interface failed, port_id=2d989e8c-d768-494a-a866-4da8ff809d05, reason: Instance f6972b90-7746-4a37-8be8-1739f96dc3dc could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 903.280878] env[69927]: DEBUG nova.compute.manager [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 903.281250] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 903.282231] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3d3b41-f935-4b0a-8f6a-395d8d08eda5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.291520] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.294533] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a994cd68-3edf-4968-8c78-9494a16d4300 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.300708] env[69927]: DEBUG oslo_vmware.api [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 903.300708] env[69927]: value = "task-4096039" [ 903.300708] env[69927]: _type = "Task" [ 903.300708] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.312081] env[69927]: DEBUG oslo_vmware.api [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4096039, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.401142] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a51a10f-ad24-4cc4-8eed-ce2af1886978 tempest-ListImageFiltersTestJSON-1014675120 tempest-ListImageFiltersTestJSON-1014675120-project-member] Lock "a536b069-45e0-4ffe-be53-ac33f8cb6ec0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.414s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.460043] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096038, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080188} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.460390] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.461474] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07860630-da8b-4f74-88d7-b28e166e5afd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.485780] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.486315] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0020d687-e9e7-4fee-9b10-72bdc0456f45 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.436s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.486533] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c573e83-8461-41db-9473-aa17f17118dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.510942] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 903.510942] env[69927]: value = "task-4096040" [ 903.510942] env[69927]: _type = "Task" [ 903.510942] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.523137] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096040, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.546892] env[69927]: DEBUG nova.compute.manager [req-ef2bdfc5-0194-42ad-ac01-352d8041a93c req-2b29866b-e859-4576-8593-7c073f67999b service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-vif-deleted-fa4af0b2-0db2-4cb8-bfd1-8a09dc8c41d1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 903.546892] env[69927]: DEBUG nova.compute.manager [req-ef2bdfc5-0194-42ad-ac01-352d8041a93c req-2b29866b-e859-4576-8593-7c073f67999b service nova] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Received event network-vif-deleted-e789eded-c761-41d1-90f9-0541da7d7635 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 903.650089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.739018] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.813781] env[69927]: DEBUG oslo_vmware.api [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4096039, 'name': PowerOffVM_Task, 'duration_secs': 0.400865} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.814046] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.814354] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.814908] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc857142-e131-4701-af24-177e24185e25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.908858] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.908858] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.908858] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Deleting the datastore file [datastore2] 8442f144-2be4-4634-b151-62f049a975b6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.909105] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc478c18-fb3e-4301-a468-ff8ac9fff9a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.916806] env[69927]: DEBUG oslo_vmware.api [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for the task: (returnval){ [ 903.916806] env[69927]: value = "task-4096042" [ 903.916806] env[69927]: _type = "Task" [ 903.916806] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.932260] env[69927]: DEBUG oslo_vmware.api [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4096042, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.009794] env[69927]: DEBUG nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 904.028490] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096040, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.432206] env[69927]: DEBUG oslo_vmware.api [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Task: {'id': task-4096042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416957} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.436997] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.437278] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.437466] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.437644] env[69927]: INFO nova.compute.manager [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 904.437933] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.438457] env[69927]: DEBUG nova.compute.manager [-] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.438912] env[69927]: DEBUG nova.network.neutron [-] [instance: 8442f144-2be4-4634-b151-62f049a975b6] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.540324] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096040, 'name': ReconfigVM_Task, 'duration_secs': 0.53662} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.543524] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Reconfigured VM instance instance-00000034 to attach disk [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28/cb35090d-bfd2-46df-8ee5-d9b068ba0a28.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.544679] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47e460d8-8165-4134-ba8c-a2a9bf719f58 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.547277] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.554757] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 904.554757] env[69927]: value = "task-4096043" [ 904.554757] env[69927]: _type = "Task" [ 904.554757] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.567291] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096043, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.636028] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d894ad6f-ac5a-434e-8181-341fbe6bc1e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.644074] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2e2194-a186-47b9-867b-05d2e7428e0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.677648] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02616155-32e6-4b3b-9bb8-8ac53a655602 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.685853] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9059ebda-d89b-4384-90b3-bbae59d7afd2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.705393] env[69927]: DEBUG nova.compute.provider_tree [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.069575] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096043, 'name': Rename_Task, 'duration_secs': 0.384668} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.069881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.070290] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5864c8c5-d8a0-4a1e-9fe7-4fe6670b86a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.080296] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 905.080296] env[69927]: value = "task-4096044" [ 905.080296] env[69927]: _type = "Task" [ 905.080296] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.089850] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096044, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.208967] env[69927]: DEBUG nova.scheduler.client.report [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.444678] env[69927]: DEBUG nova.network.neutron [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Successfully updated port: afbb315a-bc71-453c-9b30-ef008a6e400e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 905.597151] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096044, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.720444] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.868s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.721024] env[69927]: DEBUG nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 905.724747] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 36.356s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.739646] env[69927]: DEBUG nova.network.neutron [-] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.785802] env[69927]: DEBUG nova.compute.manager [req-a0533beb-d8e8-4c63-92f1-ffafa01223ff req-c0bbe9c7-f8d1-4f81-8bbc-bd278f7b7ed9 service nova] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Received event network-vif-deleted-9a088536-28fd-4645-87ba-e071c754db1e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.949110] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "refresh_cache-c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.949428] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired lock "refresh_cache-c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.949710] env[69927]: DEBUG nova.network.neutron [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.094117] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096044, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.175073] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "67e00c40-35b6-4a9f-9505-19b804e78c04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.175706] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.233199] env[69927]: INFO nova.compute.claims [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.243196] env[69927]: DEBUG nova.compute.utils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 906.245596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.245725] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.245920] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.246175] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.246931] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.253506] env[69927]: INFO nova.compute.manager [-] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Took 1.81 seconds to deallocate network for instance. [ 906.253506] env[69927]: DEBUG nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 906.253506] env[69927]: DEBUG nova.network.neutron [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 906.256899] env[69927]: INFO nova.compute.manager [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Terminating instance [ 906.303840] env[69927]: DEBUG nova.policy [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f63b6df5ed46fd8584bbf36716a0c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66cbe813421e40d1bd515411bc3c045a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 906.363014] env[69927]: DEBUG nova.compute.manager [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Received event network-vif-plugged-afbb315a-bc71-453c-9b30-ef008a6e400e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 906.363260] env[69927]: DEBUG oslo_concurrency.lockutils [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] Acquiring lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.363529] env[69927]: DEBUG oslo_concurrency.lockutils [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.363687] env[69927]: DEBUG 
oslo_concurrency.lockutils [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.364529] env[69927]: DEBUG nova.compute.manager [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] No waiting events found dispatching network-vif-plugged-afbb315a-bc71-453c-9b30-ef008a6e400e {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 906.366071] env[69927]: WARNING nova.compute.manager [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Received unexpected event network-vif-plugged-afbb315a-bc71-453c-9b30-ef008a6e400e for instance with vm_state building and task_state spawning. [ 906.366447] env[69927]: DEBUG nova.compute.manager [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Received event network-changed-afbb315a-bc71-453c-9b30-ef008a6e400e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 906.366979] env[69927]: DEBUG nova.compute.manager [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Refreshing instance network info cache due to event network-changed-afbb315a-bc71-453c-9b30-ef008a6e400e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 906.366979] env[69927]: DEBUG oslo_concurrency.lockutils [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] Acquiring lock "refresh_cache-c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.492490] env[69927]: DEBUG nova.network.neutron [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.594751] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096044, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.747215] env[69927]: INFO nova.compute.resource_tracker [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating resource usage from migration 640acec6-f769-4d56-a7aa-84655b85e53f [ 906.764156] env[69927]: DEBUG nova.network.neutron [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Successfully created port: 6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.767488] env[69927]: DEBUG nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 906.770070] env[69927]: DEBUG nova.compute.manager [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 906.770306] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.771131] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.772262] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620825b2-9416-42b9-8a0e-558e4cf4b73b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.783010] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.783296] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad1197a7-aa25-4e58-9865-45d627501f35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.792010] env[69927]: DEBUG oslo_vmware.api [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 906.792010] env[69927]: value = "task-4096045" [ 906.792010] 
env[69927]: _type = "Task" [ 906.792010] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.806517] env[69927]: DEBUG oslo_vmware.api [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.007057] env[69927]: DEBUG nova.network.neutron [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Updating instance_info_cache with network_info: [{"id": "afbb315a-bc71-453c-9b30-ef008a6e400e", "address": "fa:16:3e:be:32:53", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafbb315a-bc", "ovs_interfaceid": "afbb315a-bc71-453c-9b30-ef008a6e400e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.036575] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.037013] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.094252] env[69927]: DEBUG oslo_vmware.api [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096044, 'name': PowerOnVM_Task, 'duration_secs': 1.979567} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.097350] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.097584] env[69927]: DEBUG nova.compute.manager [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 907.098970] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae4fcd9-0440-47d1-8bb7-484192a24a69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.199804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.199804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.199804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.199804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.199804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.203666] env[69927]: INFO nova.compute.manager [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 
tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Terminating instance [ 907.312467] env[69927]: DEBUG oslo_vmware.api [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096045, 'name': PowerOffVM_Task, 'duration_secs': 0.237751} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.312467] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.312635] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.312993] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c87b5bdf-1229-46dc-ba8e-45524c2aeaf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.402233] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.402463] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.402647] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Deleting the datastore file [datastore2] 07484a6c-f9d1-405b-9ae4-a1b830f474ed {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.403433] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11de2e77-7cb7-4702-88c8-1af4500312b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.415565] env[69927]: DEBUG oslo_vmware.api [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 907.415565] env[69927]: value = "task-4096047" [ 907.415565] env[69927]: _type = "Task" [ 907.415565] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.421868] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f31c99-d6bc-4d15-bf3a-7580160249a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.440213] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbc22f1-d668-4e25-a573-260d90a43a44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.441614] env[69927]: DEBUG oslo_vmware.api [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.474363] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a685a1b-bd8b-4169-992c-cbf82be11576 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.483110] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4406e4-c8a0-4149-a472-ac2f42f5d359 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.499138] env[69927]: DEBUG nova.compute.provider_tree [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.510514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Releasing lock "refresh_cache-c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.511135] env[69927]: DEBUG nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Instance network_info: |[{"id": "afbb315a-bc71-453c-9b30-ef008a6e400e", "address": "fa:16:3e:be:32:53", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafbb315a-bc", "ovs_interfaceid": 
"afbb315a-bc71-453c-9b30-ef008a6e400e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 907.511837] env[69927]: DEBUG oslo_concurrency.lockutils [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] Acquired lock "refresh_cache-c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.511965] env[69927]: DEBUG nova.network.neutron [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Refreshing network info cache for port afbb315a-bc71-453c-9b30-ef008a6e400e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.513127] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:32:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'afbb315a-bc71-453c-9b30-ef008a6e400e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.522512] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Creating folder: Project (72b796c4097e41cf947cacae04cb3a20). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 907.523614] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ed92804-d750-44b7-99e4-09b6cc6240d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.537302] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Created folder: Project (72b796c4097e41cf947cacae04cb3a20) in parent group-v811283. [ 907.537757] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Creating folder: Instances. Parent ref: group-v811454. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 907.537914] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54d1b620-8984-4b24-b4bb-ee837fcee259 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.550368] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Created folder: Instances in parent group-v811454. 
[ 907.550368] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.550368] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.550714] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf8cc7a2-fd1c-4830-bfc8-d8f6d5794d80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.574414] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.574414] env[69927]: value = "task-4096050" [ 907.574414] env[69927]: _type = "Task" [ 907.574414] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.580800] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096050, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.619020] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.714273] env[69927]: DEBUG nova.compute.manager [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.714596] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.715991] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8046ba87-2e3e-4188-85aa-e578be11c193 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.726368] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.726368] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de428aa9-ccff-42fe-9467-befe2f448feb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.734572] env[69927]: DEBUG oslo_vmware.api [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 907.734572] env[69927]: value = "task-4096051" [ 907.734572] env[69927]: _type = "Task" [ 907.734572] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.745300] env[69927]: DEBUG oslo_vmware.api [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096051, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.791494] env[69927]: DEBUG nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 907.827176] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 907.827176] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.827176] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.827176] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.827176] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.827176] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 907.828433] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 907.828901] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 907.829270] env[69927]: DEBUG nova.virt.hardware [None 
req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 907.829640] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 907.829976] env[69927]: DEBUG nova.virt.hardware [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 907.831275] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdcf4e7-0d00-4608-bab6-0d990b02136c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.844696] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82017a9e-db00-4842-8143-3ce19f0f37d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.927129] env[69927]: DEBUG oslo_vmware.api [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.336596} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.927439] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.929492] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.929492] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.929492] env[69927]: INFO nova.compute.manager [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Took 1.16 seconds to destroy the instance on the hypervisor. 
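
The nova.virt.hardware entries above walk through CPU-topology selection for the m1.nano flavor: with vcpus=1 and no flavor or image limits or preferences, the only factorisation of the vCPU count is sockets=1, cores=1, threads=1, which is the topology sorted to the front and chosen. The sketch below reproduces that idea under stated assumptions; possible_topologies and sort_by_preference are hypothetical helpers, not the functions in nova/virt/hardware.py.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) whose product equals the vCPU count and
    # that fits under the limits; the log shows limits of 65536 on each axis.
    topos = []
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            topos.append((s, c, t))
    return topos

def sort_by_preference(topos, prefer=(0, 0, 0)):
    # Topologies matching more of the non-zero preferred axes sort first; with
    # no preference (0, 0, 0), as here, the enumeration order is kept.
    def mismatches(t):
        return sum(1 for want, got in zip(prefer, t) if want and want != got)
    return sorted(topos, key=mismatches)

# m1.nano has vcpus=1 and no preferences, so the only candidate -- and
# therefore the chosen topology -- is 1 socket x 1 core x 1 thread.
print(sort_by_preference(possible_topologies(1)))   # [(1, 1, 1)]
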
[ 907.929492] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.929492] env[69927]: DEBUG nova.compute.manager [-] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 907.929492] env[69927]: DEBUG nova.network.neutron [-] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.003194] env[69927]: DEBUG nova.scheduler.client.report [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.086153] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096050, 'name': CreateVM_Task, 'duration_secs': 0.447521} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.086729] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 908.087799] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.088111] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.088653] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 908.090251] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e82fd30-9737-4b1f-9696-3562d52be1b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.099232] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 908.099232] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5276e08c-e7b5-48c7-2753-46811250c3f9" [ 908.099232] env[69927]: _type = "Task" [ 908.099232] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.111422] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5276e08c-e7b5-48c7-2753-46811250c3f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.256628] env[69927]: DEBUG oslo_vmware.api [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096051, 'name': PowerOffVM_Task, 'duration_secs': 0.217966} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.257165] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.257365] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.257690] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6747d01-1f8b-4006-9e32-5e42f1f38325 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.337606] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.337972] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.338207] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Deleting the datastore file [datastore2] aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.338512] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec6841a8-53aa-4e04-ac8c-d9fd2cce409a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.350304] env[69927]: DEBUG oslo_vmware.api [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for the task: (returnval){ [ 908.350304] env[69927]: value = "task-4096053" [ 908.350304] env[69927]: _type = "Task" [ 908.350304] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.360252] env[69927]: DEBUG oslo_vmware.api [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096053, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.460193] env[69927]: DEBUG nova.network.neutron [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Updated VIF entry in instance network info cache for port afbb315a-bc71-453c-9b30-ef008a6e400e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.460557] env[69927]: DEBUG nova.network.neutron [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Updating instance_info_cache with network_info: [{"id": "afbb315a-bc71-453c-9b30-ef008a6e400e", "address": "fa:16:3e:be:32:53", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafbb315a-bc", "ovs_interfaceid": "afbb315a-bc71-453c-9b30-ef008a6e400e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.513499] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.789s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.513754] env[69927]: INFO nova.compute.manager [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Migrating [ 908.525238] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.062s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.525345] env[69927]: DEBUG nova.objects.instance [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lazy-loading 'resources' on Instance uuid 80fc9add-683b-424e-9876-cdcae664e2da {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.613745] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 
tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5276e08c-e7b5-48c7-2753-46811250c3f9, 'name': SearchDatastore_Task, 'duration_secs': 0.011396} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.613958] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.614273] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 908.614580] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.614785] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.615045] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 908.615382] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d61bedc3-bc16-4f2a-baa3-591d079fe338 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.624648] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 908.625026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 908.625616] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-807824b1-c3d4-4d77-9f7f-73b846ac3813 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.632065] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 908.632065] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52491a27-6a2f-d609-3774-690e87bcb157" [ 908.632065] env[69927]: _type = "Task" [ 908.632065] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.647138] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52491a27-6a2f-d609-3774-690e87bcb157, 'name': SearchDatastore_Task, 'duration_secs': 0.010817} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.647999] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-982a3b44-45f5-4aef-ba61-1dcd0dac2bbe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.654421] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 908.654421] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ccbb9d-5cd7-524e-692b-3bfac6b700fa" [ 908.654421] env[69927]: _type = "Task" [ 908.654421] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.664608] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ccbb9d-5cd7-524e-692b-3bfac6b700fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.767785] env[69927]: DEBUG nova.compute.manager [req-9a5c25c4-3375-4392-b117-cc9230e38a48 req-d3de2e73-4d52-4b1c-adc3-a03ee9c096f8 service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Received event network-vif-deleted-703aee39-6cbb-4cc4-8043-e405ac713dda {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 908.767785] env[69927]: INFO nova.compute.manager [req-9a5c25c4-3375-4392-b117-cc9230e38a48 req-d3de2e73-4d52-4b1c-adc3-a03ee9c096f8 service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Neutron deleted interface 703aee39-6cbb-4cc4-8043-e405ac713dda; detaching it from the instance and deleting it from the info cache [ 908.767785] env[69927]: DEBUG nova.network.neutron [req-9a5c25c4-3375-4392-b117-cc9230e38a48 req-d3de2e73-4d52-4b1c-adc3-a03ee9c096f8 service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.863021] env[69927]: DEBUG oslo_vmware.api [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Task: {'id': task-4096053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221698} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.863356] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.863494] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.863679] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.863860] env[69927]: INFO nova.compute.manager [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 908.864157] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.864368] env[69927]: DEBUG nova.compute.manager [-] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.864464] env[69927]: DEBUG nova.network.neutron [-] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.876868] env[69927]: DEBUG nova.network.neutron [-] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.902720] env[69927]: DEBUG nova.network.neutron [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Successfully updated port: 6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.964417] env[69927]: DEBUG oslo_concurrency.lockutils [req-8de63fa1-746d-4cc2-b5ea-327eaf5622ba req-f08011a3-1ad1-43ea-a88d-0e7ce489e207 service nova] Releasing lock "refresh_cache-c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.001221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.001221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.001221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.001221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.001221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 
tempest-ServersListShow2100Test-124448045-project-member] Lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.003907] env[69927]: INFO nova.compute.manager [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Terminating instance [ 909.034629] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.034629] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.034629] env[69927]: DEBUG nova.network.neutron [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.171760] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ccbb9d-5cd7-524e-692b-3bfac6b700fa, 'name': SearchDatastore_Task, 'duration_secs': 0.010902} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.171760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.171760] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c3a531fd-647c-43b6-9d3d-fc6ecbc2445e/c3a531fd-647c-43b6-9d3d-fc6ecbc2445e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 909.171944] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b700944c-10df-429b-84f4-73f4b551395a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.188543] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 909.188543] env[69927]: value = "task-4096054" [ 909.188543] env[69927]: _type = "Task" [ 909.188543] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.199761] env[69927]: DEBUG nova.compute.manager [req-5b94984a-928d-4406-a252-5462af967ec0 req-ec9a46c5-bb98-4a8f-bb5f-d429b4ebac7e service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Received event network-vif-plugged-6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.200033] env[69927]: DEBUG oslo_concurrency.lockutils [req-5b94984a-928d-4406-a252-5462af967ec0 req-ec9a46c5-bb98-4a8f-bb5f-d429b4ebac7e service nova] Acquiring lock "e1946033-4ec3-4561-afdf-a3b748f7c611-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.202167] env[69927]: DEBUG oslo_concurrency.lockutils [req-5b94984a-928d-4406-a252-5462af967ec0 req-ec9a46c5-bb98-4a8f-bb5f-d429b4ebac7e service nova] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.202167] env[69927]: DEBUG oslo_concurrency.lockutils [req-5b94984a-928d-4406-a252-5462af967ec0 req-ec9a46c5-bb98-4a8f-bb5f-d429b4ebac7e service nova] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.202167] env[69927]: DEBUG nova.compute.manager [req-5b94984a-928d-4406-a252-5462af967ec0 req-ec9a46c5-bb98-4a8f-bb5f-d429b4ebac7e 
service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] No waiting events found dispatching network-vif-plugged-6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 909.202167] env[69927]: WARNING nova.compute.manager [req-5b94984a-928d-4406-a252-5462af967ec0 req-ec9a46c5-bb98-4a8f-bb5f-d429b4ebac7e service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Received unexpected event network-vif-plugged-6e95f83c-01f6-4bbe-800b-ed805d60f684 for instance with vm_state building and task_state spawning. [ 909.206324] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096054, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.270905] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8f37140-5ddb-4768-b6b8-95d892f943c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.287310] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d87334-79ae-47ad-ae5a-7d1bca57a04f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.327552] env[69927]: DEBUG nova.compute.manager [req-9a5c25c4-3375-4392-b117-cc9230e38a48 req-d3de2e73-4d52-4b1c-adc3-a03ee9c096f8 service nova] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Detach interface failed, port_id=703aee39-6cbb-4cc4-8043-e405ac713dda, reason: Instance 07484a6c-f9d1-405b-9ae4-a1b830f474ed could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 909.379242] env[69927]: INFO nova.compute.manager [-] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Took 1.45 seconds to deallocate network for instance. 
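
The network-vif-plugged handling above follows the compute manager's external-event pattern: a waiter can be registered per (instance, event) pair, and an incoming Neutron notification either releases that waiter or, as here while the instance is still in vm_state building / task_state spawning, is reported as unexpected because nothing was waiting. A minimal sketch of that pop-or-report flow follows; InstanceEvents and its methods are assumed stand-ins, not the nova.compute.manager implementation.

import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance uuid -> {event name: Event}

    def prepare_for_event(self, instance_uuid, name):
        # Called before an operation that expects the event, e.g. plugging a VIF.
        with self._lock:
            ev = threading.Event()
            self._waiters[instance_uuid][name] = ev
            return ev

    def pop_event(self, instance_uuid, name):
        with self._lock:
            return self._waiters[instance_uuid].pop(name, None)

events = InstanceEvents()
incoming = "network-vif-plugged-6e95f83c-01f6-4bbe-800b-ed805d60f684"
waiter = events.pop_event("e1946033-4ec3-4561-afdf-a3b748f7c611", incoming)
if waiter is None:
    # Matches the WARNING above: nothing was waiting, so the event is unexpected.
    print("No waiting events found dispatching", incoming)
else:
    waiter.set()   # releases whoever called prepare_for_event(...).wait()
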
[ 909.407575] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.407728] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.407845] env[69927]: DEBUG nova.network.neutron [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.510616] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "refresh_cache-cb35090d-bfd2-46df-8ee5-d9b068ba0a28" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.510844] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquired lock "refresh_cache-cb35090d-bfd2-46df-8ee5-d9b068ba0a28" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.511072] env[69927]: DEBUG nova.network.neutron [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.714263] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096054, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.771957] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0150d133-e1fd-4cac-ad66-7e1aec875375 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.780128] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232d968f-2d9c-4efa-8227-7c8c6abd084f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.815749] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff2eabb-7657-4504-b74e-9128c7764cf1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.822174] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad49fd3-9899-4265-a13c-f46132781a78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.838011] env[69927]: DEBUG nova.compute.provider_tree [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.845099] env[69927]: DEBUG nova.network.neutron [-] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.891649] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.958967] env[69927]: DEBUG nova.network.neutron [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.009318] env[69927]: DEBUG nova.network.neutron [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance_info_cache with network_info: [{"id": "32049b49-d761-48ff-8938-d76ebe86f62e", "address": "fa:16:3e:64:c8:10", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32049b49-d7", "ovs_interfaceid": "32049b49-d761-48ff-8938-d76ebe86f62e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.045019] env[69927]: DEBUG nova.network.neutron [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.209405] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096054, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556049} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.209405] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c3a531fd-647c-43b6-9d3d-fc6ecbc2445e/c3a531fd-647c-43b6-9d3d-fc6ecbc2445e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.209709] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 910.209798] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7a9357d-e308-4ca5-99d8-00b8b726e01b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.218300] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 910.218300] env[69927]: value = "task-4096055" [ 910.218300] env[69927]: _type = "Task" [ 910.218300] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.225045] env[69927]: DEBUG nova.network.neutron [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.231222] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096055, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.321798] env[69927]: DEBUG nova.network.neutron [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updating instance_info_cache with network_info: [{"id": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "address": "fa:16:3e:85:c5:49", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95f83c-01", "ovs_interfaceid": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.341383] env[69927]: DEBUG nova.scheduler.client.report [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.347021] env[69927]: INFO nova.compute.manager [-] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Took 1.48 seconds to deallocate network for instance. 
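
The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same wait-for-task shape: submit the vSphere task, poll it, log intermediate progress (0% ... 77%), and stop once it reports success or raise on error. The loop below is an illustrative stand-in for that pattern, assuming a poll_fn callback; it is not oslo.vmware's api.wait_for_task.

import time

def wait_for_task(poll_fn, interval=0.5):
    """poll_fn() -> (state, progress, error) with state in {'running', 'success', 'error'}."""
    while True:
        state, progress, error = poll_fn()
        if state == "running":
            print("progress is %d%%" % progress)
            time.sleep(interval)
        elif state == "success":
            print("completed successfully")
            return
        else:
            raise RuntimeError("task failed: %s" % error)

# A fake disk-copy task that reports 0%, then 77%, then success, mirroring the
# CopyVirtualDisk_Task trace above.
states = iter([("running", 0, None), ("running", 77, None), ("success", 100, None)])
wait_for_task(lambda: next(states), interval=0)
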
[ 910.511402] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.624623] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.624848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.729082] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Releasing lock "refresh_cache-cb35090d-bfd2-46df-8ee5-d9b068ba0a28" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.729514] env[69927]: DEBUG nova.compute.manager [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.729764] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.730107] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096055, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076743} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.730857] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c607a731-0b91-42e0-ae4a-e37e9fe9e7d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.733722] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.734575] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8aa0279-5132-4318-87d0-1a93f8ebcd43 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.760671] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] c3a531fd-647c-43b6-9d3d-fc6ecbc2445e/c3a531fd-647c-43b6-9d3d-fc6ecbc2445e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.763541] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb7f7db7-85cd-4297-b026-f3b4e1038571 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.778567] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.779143] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c73fb4db-1e91-460d-af84-68235fadf9c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.786731] env[69927]: DEBUG oslo_vmware.api [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 910.786731] env[69927]: value = "task-4096056" [ 910.786731] env[69927]: _type = "Task" [ 910.786731] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.788287] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 910.788287] env[69927]: value = "task-4096057" [ 910.788287] env[69927]: _type = "Task" [ 910.788287] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.800794] env[69927]: DEBUG oslo_vmware.api [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.804354] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096057, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.828420] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Releasing lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.828622] env[69927]: DEBUG nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Instance network_info: |[{"id": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "address": "fa:16:3e:85:c5:49", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95f83c-01", "ovs_interfaceid": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 910.828962] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:c5:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ffb921-796a-40fe-9662-d3fc01547dcb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e95f83c-01f6-4bbe-800b-ed805d60f684', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.840738] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 
tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.841105] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 910.841433] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36378476-151e-4efa-9080-2e3d1046fa20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.859080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.334s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.863036] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.864755] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 36.400s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.864755] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.864755] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 910.864755] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.047s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.866880] env[69927]: INFO nova.compute.claims [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.870791] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95de7ab-fea9-4cea-aa17-86cb6532cc26 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.875289] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.875289] env[69927]: value = "task-4096058" [ 910.875289] env[69927]: _type = "Task" [ 910.875289] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.882766] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26f3674-7740-42b0-b18e-8ec9a515f990 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.890107] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096058, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.901584] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02e00fc-c2b4-4161-ad47-2e14a2341a52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.905597] env[69927]: INFO nova.scheduler.client.report [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted allocations for instance 80fc9add-683b-424e-9876-cdcae664e2da [ 910.913762] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7423183b-bfc7-408b-a78f-b024b796338d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.946345] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178233MB free_disk=16GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 910.946563] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.301634] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096057, 'name': ReconfigVM_Task, 'duration_secs': 0.471307} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.304990] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Reconfigured VM instance instance-00000038 to attach disk [datastore1] c3a531fd-647c-43b6-9d3d-fc6ecbc2445e/c3a531fd-647c-43b6-9d3d-fc6ecbc2445e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.306348] env[69927]: DEBUG oslo_vmware.api [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096056, 'name': PowerOffVM_Task, 'duration_secs': 0.145926} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.306595] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ff8ba84-6813-499b-b463-0b0e402b58b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.308473] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.308659] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.308900] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21942b5c-c964-4d3d-9880-a1af7de62d55 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.315851] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 911.315851] env[69927]: value = "task-4096059" [ 911.315851] env[69927]: _type = "Task" [ 911.315851] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.334701] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.334960] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.335164] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Deleting the datastore file [datastore1] cb35090d-bfd2-46df-8ee5-d9b068ba0a28 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.335646] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-759ef12c-dd63-4c1b-af99-139cdc75a37f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.346273] env[69927]: DEBUG oslo_vmware.api [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for the task: (returnval){ [ 911.346273] env[69927]: value = "task-4096061" [ 911.346273] env[69927]: _type = "Task" [ 911.346273] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.356275] env[69927]: DEBUG oslo_vmware.api [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096061, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.386929] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096058, 'name': CreateVM_Task, 'duration_secs': 0.44065} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.387910] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 911.388756] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.388884] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.389254] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 911.389801] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fca29cb6-a601-4304-ac5c-30e6029b01f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.396681] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 911.396681] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e52a42-48c8-b45f-4616-874907127edf" [ 911.396681] env[69927]: _type = "Task" [ 911.396681] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.406441] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e52a42-48c8-b45f-4616-874907127edf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.416916] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6170d672-b086-4770-8dae-caf23df0c129 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "80fc9add-683b-424e-9876-cdcae664e2da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.352s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.578639] env[69927]: DEBUG nova.compute.manager [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Received event network-changed-6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.578639] env[69927]: DEBUG nova.compute.manager [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Refreshing instance network info cache due to event network-changed-6e95f83c-01f6-4bbe-800b-ed805d60f684. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 911.578875] env[69927]: DEBUG oslo_concurrency.lockutils [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] Acquiring lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.578875] env[69927]: DEBUG oslo_concurrency.lockutils [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] Acquired lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.578991] env[69927]: DEBUG nova.network.neutron [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Refreshing network info cache for port 6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.826941] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096059, 'name': Rename_Task, 'duration_secs': 0.173327} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.827256] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.827506] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e59ceb8-a5d5-4a8c-8fa9-1593b760e061 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.834816] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 911.834816] env[69927]: value = "task-4096062" [ 911.834816] env[69927]: _type = "Task" [ 911.834816] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.843330] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096062, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.855730] env[69927]: DEBUG oslo_vmware.api [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Task: {'id': task-4096061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095404} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.856940] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.857263] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.857369] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.857527] env[69927]: INFO nova.compute.manager [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 911.857788] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.858013] env[69927]: DEBUG nova.compute.manager [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.858114] env[69927]: DEBUG nova.network.neutron [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.893686] env[69927]: DEBUG nova.network.neutron [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.908644] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e52a42-48c8-b45f-4616-874907127edf, 'name': SearchDatastore_Task, 'duration_secs': 0.010667} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.909610] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.910153] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.910153] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.910371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.910470] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.911350] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-792e9027-87d5-4434-9d1c-589c7a0b39a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.921288] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.921489] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 911.922380] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e7de511-7e72-4b42-b850-57cb1b5482f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.929755] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 911.929755] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f651bb-06c6-8b31-c2f2-8b40acd1d8fd" [ 911.929755] env[69927]: _type = "Task" [ 911.929755] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.941880] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f651bb-06c6-8b31-c2f2-8b40acd1d8fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.028638] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee206fd-1666-4a08-8724-e0aebf1229a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.048317] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance 'c3e8a429-8484-4b11-abe3-1cccf0992556' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 912.349420] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096062, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.402033] env[69927]: DEBUG nova.network.neutron [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.427863] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b360aca5-3a63-4d0d-80f5-f7a17cd14541 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.445969] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955846ff-30fe-4a72-aeb5-9787d35bdf59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.449583] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f651bb-06c6-8b31-c2f2-8b40acd1d8fd, 'name': SearchDatastore_Task, 'duration_secs': 0.01166} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.451313] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d0f251-1de5-4906-931c-4306f15a4058 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.480511] env[69927]: DEBUG nova.network.neutron [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updated VIF entry in instance network info cache for port 6e95f83c-01f6-4bbe-800b-ed805d60f684. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.480878] env[69927]: DEBUG nova.network.neutron [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updating instance_info_cache with network_info: [{"id": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "address": "fa:16:3e:85:c5:49", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95f83c-01", "ovs_interfaceid": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.483086] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a25c44-6db8-47fe-94c7-e3a588d0a0f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.487566] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 912.487566] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52efda6c-b129-3baf-af7c-c58f0b1c339f" [ 912.487566] env[69927]: _type = "Task" [ 912.487566] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.495156] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfefa61-dc26-4ea0-91b0-3c8c10aab6a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.505017] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52efda6c-b129-3baf-af7c-c58f0b1c339f, 'name': SearchDatastore_Task, 'duration_secs': 0.011278} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.505854] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.506167] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] e1946033-4ec3-4561-afdf-a3b748f7c611/e1946033-4ec3-4561-afdf-a3b748f7c611.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 912.506537] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30f1a061-adc5-4a8f-93ef-413c2e65a7a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.517775] env[69927]: DEBUG nova.compute.provider_tree [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.526957] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 912.526957] env[69927]: value = "task-4096063" [ 912.526957] env[69927]: _type = "Task" [ 912.526957] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.536330] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.557830] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.558715] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-897d55b5-9733-4d7c-890d-37ff6285dfa0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.566515] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 912.566515] env[69927]: value = "task-4096064" [ 912.566515] env[69927]: _type = "Task" [ 912.566515] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.576855] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.846943] env[69927]: DEBUG oslo_vmware.api [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096062, 'name': PowerOnVM_Task, 'duration_secs': 0.73333} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.847316] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.847316] env[69927]: INFO nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Took 10.26 seconds to spawn the instance on the hypervisor. [ 912.847449] env[69927]: DEBUG nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 912.848362] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95594bcb-13b8-4a2c-adc3-1b8604512729 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.905707] env[69927]: INFO nova.compute.manager [-] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Took 1.05 seconds to deallocate network for instance. 
[ 912.988273] env[69927]: DEBUG oslo_concurrency.lockutils [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] Releasing lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.989853] env[69927]: DEBUG nova.compute.manager [req-32a7fa98-a0cd-4c89-b9e7-27f1f99ce0e4 req-ae932a6e-f4c5-4b36-a8dc-e924350c072b service nova] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Received event network-vif-deleted-0a7e52ca-6c1d-456c-a839-ce5f626c210d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.021743] env[69927]: DEBUG nova.scheduler.client.report [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.040579] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500691} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.041539] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] e1946033-4ec3-4561-afdf-a3b748f7c611/e1946033-4ec3-4561-afdf-a3b748f7c611.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.042049] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.042487] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d49bf0a2-2eb1-4da7-bee4-7b919a0e20d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.050351] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 913.050351] env[69927]: value = "task-4096065" [ 913.050351] env[69927]: _type = "Task" [ 913.050351] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.062791] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096065, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.077526] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096064, 'name': PowerOffVM_Task, 'duration_secs': 0.256742} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.077526] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.077526] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance 'c3e8a429-8484-4b11-abe3-1cccf0992556' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 913.366281] env[69927]: INFO nova.compute.manager [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Took 51.06 seconds to build instance. [ 913.413342] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.528516] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.529048] env[69927]: DEBUG nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 913.531596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.066s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.535150] env[69927]: INFO nova.compute.claims [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.564366] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06837} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.565714] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.565814] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44dd62ab-00ed-45b5-ad15-f5d499b92f8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.589870] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] e1946033-4ec3-4561-afdf-a3b748f7c611/e1946033-4ec3-4561-afdf-a3b748f7c611.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.592990] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.593304] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.593469] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.593573] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.593739] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.593873] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.594127] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.594269] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.594438] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.594601] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.594773] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.599548] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f5fcbfd-a46a-43a6-8ee8-66444edf374b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.613648] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c784fb5-0b5d-4273-9916-0c4823946425 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.631493] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 913.631493] env[69927]: value = "task-4096067" [ 913.631493] env[69927]: _type = "Task" [ 913.631493] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.632937] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 913.632937] env[69927]: value = "task-4096066" [ 913.632937] env[69927]: _type = "Task" [ 913.632937] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.645408] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096067, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.648827] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096066, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.868333] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b2a29509-92c3-40cd-aaaa-ddc88781f034 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.343s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.039069] env[69927]: DEBUG nova.compute.utils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 914.042470] env[69927]: DEBUG nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 914.148303] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096066, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.151915] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096067, 'name': ReconfigVM_Task} progress is 14%. 
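The "Acquiring lock" / "acquired" / "released ... held N s" lines in this stretch are emitted by oslo.concurrency's lockutils wrappers around Nova's resource tracker. As a hedged illustration only (not Nova's actual code), the lock name below mirrors the log while claim_resources() is a hypothetical stand-in for ResourceTracker.instance_claim():

```python
# Sketch of the oslo.concurrency locking pattern that produces the
# "Acquiring lock ... by ..." / "Lock ... acquired ..." / "Lock ... released"
# DEBUG lines above. claim_resources() is hypothetical, not Nova code.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs only while the in-process "compute_resources" lock is held;
    # lockutils logs the wait and hold times at DEBUG, as seen in the log.
    return {'instance': instance_uuid, 'claimed': True}


# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # critical section guarded by the lock
```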
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.371163] env[69927]: DEBUG nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 914.543369] env[69927]: DEBUG nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 914.651270] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096067, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.657289] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096066, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.888589] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.077419] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f010423-1064-40e8-a2ac-b331e32ed575 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.086168] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccd2cc3-d4ea-47db-b60a-c3ccb14b787f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.126857] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc1202b-f21c-49ad-a5db-43d5dd8f9f1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.137051] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735ddedf-8f8b-4c28-9cc7-2579b6ad2aae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.163780] env[69927]: DEBUG nova.compute.provider_tree [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.172416] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b 
tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096066, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.172599] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096067, 'name': ReconfigVM_Task, 'duration_secs': 1.03755} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.173195] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Reconfigured VM instance instance-00000039 to attach disk [datastore2] e1946033-4ec3-4561-afdf-a3b748f7c611/e1946033-4ec3-4561-afdf-a3b748f7c611.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.173920] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2cb9f6c-c24f-48c5-a90d-d724b53aa995 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.181945] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 915.181945] env[69927]: value = "task-4096068" [ 915.181945] env[69927]: _type = "Task" [ 915.181945] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.192699] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096068, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.345810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "c87680be-227e-4a3e-92d3-c2310623bfe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.346116] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c87680be-227e-4a3e-92d3-c2310623bfe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.561712] env[69927]: DEBUG nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 915.589958] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 915.590311] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.590557] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 915.590676] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.590851] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 915.591090] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 915.591831] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 915.591831] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 915.591831] env[69927]: DEBUG nova.virt.hardware [None 
req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 915.591831] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 915.592025] env[69927]: DEBUG nova.virt.hardware [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 915.592807] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45110d4c-7464-4d0c-979e-dbcb72478259 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.601602] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b40be8-d642-43b2-8982-b423d37afbad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.616493] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.622188] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Creating folder: Project (4299cd2e278e4bb0bfb024d363df260f). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.622544] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b903fe29-c448-48d1-b0c6-ebf5d9ae5fad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.634735] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Created folder: Project (4299cd2e278e4bb0bfb024d363df260f) in parent group-v811283. [ 915.634951] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Creating folder: Instances. Parent ref: group-v811458. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.635223] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d839df3-32a3-436e-bba1-50ee75cbc118 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.647454] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Created folder: Instances in parent group-v811458. [ 915.647699] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.648331] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 915.648549] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-901beaf5-9189-4849-af39-50bd45770a19 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.663823] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096066, 'name': ReconfigVM_Task, 'duration_secs': 1.531139} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.664553] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance 'c3e8a429-8484-4b11-abe3-1cccf0992556' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 915.669572] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 915.669572] env[69927]: value = "task-4096071" [ 915.669572] env[69927]: _type = "Task" [ 915.669572] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.674034] env[69927]: DEBUG nova.scheduler.client.report [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.680347] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096071, 'name': CreateVM_Task} progress is 0%. 
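The oslo.service.backend.eventlet.loopingcall entry above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") points at oslo.service's polling helpers. The sketch below shows the commonly used FixedIntervalLoopingCall pattern from that module; which exact helper Nova wraps here is an assumption, and check_done()/poll_backend() are hypothetical placeholders:

```python
# Sketch of a fixed-interval polling loop with oslo.service's loopingcall,
# the module named in the log entry above. check_done()/poll_backend() are
# hypothetical placeholders, not the vm_util code referenced in the log.
from oslo_service import loopingcall


def poll_backend():
    # Hypothetical condition check; pretend the work is already finished.
    return True


def check_done():
    # Raising LoopingCallDone stops the loop and hands a value to wait().
    if poll_backend():
        raise loopingcall.LoopingCallDone(retvalue='done')


timer = loopingcall.FixedIntervalLoopingCall(check_done)
result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
print(result)                               # 'done'
```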
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.692311] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096068, 'name': Rename_Task, 'duration_secs': 0.165466} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.693260] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.693684] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6107078-5ef2-474d-9dd1-fb343ccccc75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.703162] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 915.703162] env[69927]: value = "task-4096072" [ 915.703162] env[69927]: _type = "Task" [ 915.703162] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.713237] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096072, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.174530] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:38:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6803b7fa-1500-4d6d-8f68-b7ab4453032d',id=32,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1704385790',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.174933] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.174933] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.175115] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.175261] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.175452] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.175751] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.175929] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.177076] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible 
topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.177284] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 916.177468] env[69927]: DEBUG nova.virt.hardware [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.186402] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 916.187340] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.187340] env[69927]: DEBUG nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.189979] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06e45604-4cc7-4575-945f-12533f53af05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.206851] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.681s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.207124] env[69927]: DEBUG nova.objects.instance [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lazy-loading 'resources' on Instance uuid 6be47dcb-ce00-4b81-9e69-35acabac046e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.218521] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 916.218521] env[69927]: value = "task-4096073" [ 916.218521] env[69927]: _type = "Task" [ 916.218521] env[69927]: } to complete. 
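The repeated "Getting desirable topologies ... Flavor limits 0:0:0 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" traces come from nova/virt/hardware.py. Purely as an illustration (not Nova's implementation), the sketch below shows why a 1-vCPU flavor with the default 65536 limits from the log collapses to a single 1x1x1 topology:

```python
# Illustration only (not nova/virt/hardware.py): enumerate socket/core/thread
# splits of a vCPU count under upper limits; limits fall back to 65536 when
# flavor and image specify 0, matching the DEBUG traces above.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found


print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```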
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.230846] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096071, 'name': CreateVM_Task, 'duration_secs': 0.310579} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.230846] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096072, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.232785] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.233294] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.233733] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.234542] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 916.235637] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74c7c0b3-2aa7-41c8-b099-9c75e16f11d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.245050] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096073, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.249887] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 916.249887] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52568841-fb4d-3412-6976-c3996b172d78" [ 916.249887] env[69927]: _type = "Task" [ 916.249887] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.262529] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52568841-fb4d-3412-6976-c3996b172d78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.709098] env[69927]: DEBUG nova.compute.utils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.710657] env[69927]: DEBUG nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.710864] env[69927]: DEBUG nova.network.neutron [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.728717] env[69927]: DEBUG oslo_vmware.api [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096072, 'name': PowerOnVM_Task, 'duration_secs': 0.741925} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.730319] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.731175] env[69927]: INFO nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Took 8.94 seconds to spawn the instance on the hypervisor. 
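The PowerOnVM_Task / "Waiting for the task" / "Task ... completed successfully" cycle that runs through this section is the oslo.vmware session API polling vCenter tasks. A minimal sketch of that library pattern follows; the host, credentials and managed-object id are placeholders, and this shows the underlying oslo.vmware usage rather than Nova's vmwareapi driver code:

```python
# Sketch of the oslo.vmware calls behind the task entries above: invoke a
# vSphere *_Task method, then block while the session polls it to completion.
# Host, credentials and the moref value are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',                 # vCenter host (placeholder)
    'administrator@vsphere.local',     # placeholder credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5,            # matches the periodic progress polls in the log
)

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')   # placeholder moref
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)        # logs progress until success or error
```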
[ 916.731427] env[69927]: DEBUG nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 916.739489] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532042d7-69b5-47c5-b405-20c220583083 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.751392] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096073, 'name': ReconfigVM_Task, 'duration_secs': 0.209683} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.757290] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 916.763130] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2a9e0c-f09d-4cd4-a455-32acf4492f50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.774063] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52568841-fb4d-3412-6976-c3996b172d78, 'name': SearchDatastore_Task, 'duration_secs': 0.014083} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.783747] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.783989] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.784261] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.784413] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.784593] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.792343] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.795165] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7e57a37-2528-4560-b83e-85f399601035 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.797380] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c1b621c-fb4e-4b3d-b634-1c7d71703f0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.819959] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 916.819959] env[69927]: value = "task-4096074" [ 916.819959] env[69927]: _type = "Task" [ 916.819959] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.821655] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.822235] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.826997] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4524ff6e-5189-43ba-9bc5-b8edb720c1fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.833762] env[69927]: DEBUG nova.policy [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62cb970de5e046c2b0ab67aa5cae88d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e42b3ace8db6445ab652bc1e2e0a3361', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 916.841937] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.843721] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 916.843721] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526d5329-e4e0-d36d-438a-0998cf33cb7e" [ 916.843721] env[69927]: _type = "Task" [ 916.843721] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.856855] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526d5329-e4e0-d36d-438a-0998cf33cb7e, 'name': SearchDatastore_Task, 'duration_secs': 0.011213} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.857626] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9097198a-3f67-4709-891b-9888b5a78e7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.866748] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 916.866748] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525f9a81-43d3-10f3-7b77-c9b0f7f1f5ba" [ 916.866748] env[69927]: _type = "Task" [ 916.866748] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.878015] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525f9a81-43d3-10f3-7b77-c9b0f7f1f5ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.214204] env[69927]: DEBUG nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.280745] env[69927]: INFO nova.compute.manager [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Took 49.49 seconds to build instance. [ 917.344666] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2205444c-b2eb-465a-bd6c-7d3f1c362da8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.347308] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096074, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.352475] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e441dab7-31f7-431d-a808-0828902dfda3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.390703] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76628b45-510c-4468-9ac5-a1dce571540f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.399115] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525f9a81-43d3-10f3-7b77-c9b0f7f1f5ba, 'name': SearchDatastore_Task, 'duration_secs': 0.0111} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.401219] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.401496] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.401783] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3643d76-67f1-4758-ae00-8ce33fb0f468 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.404586] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f78ef3e-2422-40a8-80fd-0e2eab568582 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.441852] env[69927]: DEBUG nova.compute.provider_tree [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.441852] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 917.441852] env[69927]: value = "task-4096075" [ 917.441852] env[69927]: _type = "Task" [ 917.441852] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.441852] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096075, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.581310] env[69927]: DEBUG nova.network.neutron [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Successfully created port: fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.783429] env[69927]: DEBUG oslo_concurrency.lockutils [None req-76d1e1b1-31cf-40ac-8586-29499da0f9a5 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.384s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.836056] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096074, 'name': ReconfigVM_Task, 'duration_secs': 0.8774} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.836521] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfigured VM instance instance-0000002f to attach disk [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.836873] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance 'c3e8a429-8484-4b11-abe3-1cccf0992556' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 917.925543] env[69927]: DEBUG nova.scheduler.client.report [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.947707] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 
tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096075, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527488} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.948848] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.949308] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.949674] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a958a8e-65c1-40a5-ac00-e45e5605b7a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.958255] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 917.958255] env[69927]: value = "task-4096076" [ 917.958255] env[69927]: _type = "Task" [ 917.958255] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.973396] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096076, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.229928] env[69927]: DEBUG nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.263027] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.263445] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.263612] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.263836] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.264140] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.264323] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.264622] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.264712] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 918.264886] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.265147] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.265371] env[69927]: DEBUG nova.virt.hardware [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.266340] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c19ebba-fe04-44dc-b389-61f39768d633 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.274995] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd86d77-aba3-4a70-b6eb-6e098fd57453 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.290238] env[69927]: DEBUG nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 918.345087] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749bacf2-3d33-4b3a-b621-b85dd8e260f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.365328] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2031e1fd-41a9-416e-b75e-71b2b542e991 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.391935] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance 'c3e8a429-8484-4b11-abe3-1cccf0992556' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 918.443530] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.444553] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.836s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.446512] env[69927]: INFO nova.compute.claims [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.469540] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096076, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071608} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.469850] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.470784] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70983b1a-7a79-4a7d-aa96-dca67f57cb5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.494519] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.498274] env[69927]: INFO nova.scheduler.client.report [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Deleted allocations for instance 6be47dcb-ce00-4b81-9e69-35acabac046e [ 918.498274] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08ae4817-b224-40d3-a12b-72148accbdff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.522125] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 918.522125] env[69927]: value = "task-4096077" [ 918.522125] env[69927]: _type = "Task" [ 918.522125] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.532253] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096077, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.533437] env[69927]: DEBUG nova.compute.manager [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Received event network-changed-6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.533612] env[69927]: DEBUG nova.compute.manager [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Refreshing instance network info cache due to event network-changed-6e95f83c-01f6-4bbe-800b-ed805d60f684. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 918.533827] env[69927]: DEBUG oslo_concurrency.lockutils [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] Acquiring lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.533962] env[69927]: DEBUG oslo_concurrency.lockutils [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] Acquired lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.534136] env[69927]: DEBUG nova.network.neutron [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Refreshing network info cache for port 6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.815431] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.981045] env[69927]: DEBUG nova.network.neutron [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Port 32049b49-d761-48ff-8938-d76ebe86f62e binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 919.019171] env[69927]: DEBUG oslo_concurrency.lockutils [None req-be56dda3-46ac-498b-8391-fc1dfdfbee89 tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "6be47dcb-ce00-4b81-9e69-35acabac046e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.962s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.036339] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096077, 'name': ReconfigVM_Task, 'duration_secs': 0.27926} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.039478] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.045307] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05bb9090-70c1-4118-a885-93f6464b8616 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.053890] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 919.053890] env[69927]: value = "task-4096078" [ 919.053890] env[69927]: _type = "Task" [ 919.053890] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.063266] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096078, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.271905] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "e1946033-4ec3-4561-afdf-a3b748f7c611" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.272182] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.272328] env[69927]: INFO nova.compute.manager [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Rebooting instance [ 919.543114] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.543114] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock 
"77c6ce9e-5e15-41e4-aa81-1ef01248aa32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.543114] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.543114] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.543114] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.546050] env[69927]: INFO nova.compute.manager [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Terminating instance [ 919.572157] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096078, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.740479] env[69927]: DEBUG nova.network.neutron [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updated VIF entry in instance network info cache for port 6e95f83c-01f6-4bbe-800b-ed805d60f684. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.740920] env[69927]: DEBUG nova.network.neutron [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updating instance_info_cache with network_info: [{"id": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "address": "fa:16:3e:85:c5:49", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95f83c-01", "ovs_interfaceid": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.798637] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.989465] env[69927]: DEBUG nova.network.neutron [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Successfully updated port: fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.018033] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.019068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.019310] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.049281] env[69927]: DEBUG nova.compute.manager [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 920.049281] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 920.050033] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0ed292-f33d-4158-84da-52f469d939e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.063940] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 920.064750] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c87cd65e-4623-43d3-91dd-5bc4ff7c884d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.070610] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096078, 'name': Rename_Task, 'duration_secs': 0.831789} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.072972] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.072972] env[69927]: DEBUG oslo_vmware.api [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 920.072972] env[69927]: value = "task-4096079" [ 920.072972] env[69927]: _type = "Task" [ 920.072972] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.072972] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5097116b-9890-42b0-8395-dafadda1d423 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.094564] env[69927]: DEBUG oslo_vmware.api [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4096079, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.094564] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 920.094564] env[69927]: value = "task-4096080" [ 920.094564] env[69927]: _type = "Task" [ 920.094564] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.104875] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096080, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.143916] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc5eade-abd9-4223-8f86-66ca2c11f7c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.153198] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab141a5-827d-49d0-86b0-127a49db5201 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.209630] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b074527-acd0-457b-aad8-feb31ef1e38f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.220147] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304ce042-b3ef-4faf-8e12-9ae473d26f54 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.237465] env[69927]: DEBUG nova.compute.provider_tree [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.246620] env[69927]: DEBUG oslo_concurrency.lockutils [req-e8bc7651-bf9c-4478-9939-9bf154393efe req-f4280856-a3e8-4f85-900a-5336f6d09605 service nova] Releasing lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.246620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquired lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.246803] env[69927]: DEBUG nova.network.neutron [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.504110] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.504458] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquired lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.504458] env[69927]: DEBUG nova.network.neutron [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.575126] env[69927]: DEBUG nova.compute.manager [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Received event network-vif-plugged-fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.576286] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] Acquiring lock "50eedb80-d4bc-42c4-9686-6549cbd675b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.576566] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.576766] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.577028] env[69927]: DEBUG nova.compute.manager [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] No waiting events found dispatching network-vif-plugged-fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 920.577649] env[69927]: WARNING nova.compute.manager [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Received unexpected event network-vif-plugged-fa521cbd-9783-45a7-b712-2e14b9bf5139 for instance with vm_state building and task_state spawning. 
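Annotation (not part of the captured log): the repeated "Acquiring lock" / "acquired by" / "released by" DEBUG lines above are emitted by oslo.concurrency's lockutils helpers, which Nova wraps around its critical sections (instance event handling, refresh_cache, compute_resources). Below is a minimal sketch of that usage pattern, assuming the standard oslo.concurrency API; the lock names and function are hypothetical placeholders, not taken from Nova source.

```python
# Illustrative sketch only; lock names and the function below are hypothetical.
from oslo_concurrency import lockutils


@lockutils.synchronized('refresh_cache-<instance-uuid>')
def refresh_instance_cache():
    # Runs while the named in-process lock is held; the decorator's wrapper is
    # the "inner" frame that logs the acquired/waited/held lines seen above.
    pass


# Equivalent ad-hoc form for a one-off critical section:
with lockutils.lock('compute_resources'):
    pass  # e.g. resource claim or usage update guarded by the lock
```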
[ 920.577649] env[69927]: DEBUG nova.compute.manager [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Received event network-changed-fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.577763] env[69927]: DEBUG nova.compute.manager [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Refreshing instance network info cache due to event network-changed-fa521cbd-9783-45a7-b712-2e14b9bf5139. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 920.578320] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] Acquiring lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.589932] env[69927]: DEBUG oslo_vmware.api [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4096079, 'name': PowerOffVM_Task, 'duration_secs': 0.352616} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.590301] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 920.590503] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 920.590784] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8fc11afb-5378-408f-baee-fd90b38430bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.605252] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096080, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.681246] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 920.681246] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 920.681358] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Deleting the datastore file [datastore1] 77c6ce9e-5e15-41e4-aa81-1ef01248aa32 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 920.681595] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59153faa-f289-4376-ba55-0f4d56d64091 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.694272] env[69927]: DEBUG oslo_vmware.api [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for the task: (returnval){ [ 920.694272] env[69927]: value = "task-4096082" [ 920.694272] env[69927]: _type = "Task" [ 920.694272] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.704792] env[69927]: DEBUG oslo_vmware.api [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4096082, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.743334] env[69927]: DEBUG nova.scheduler.client.report [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.061384] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.061780] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.061848] env[69927]: DEBUG nova.network.neutron [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.105904] env[69927]: DEBUG oslo_vmware.api [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096080, 'name': PowerOnVM_Task, 'duration_secs': 0.545238} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.106025] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.106268] env[69927]: INFO nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Took 5.54 seconds to spawn the instance on the hypervisor. 
[ 921.106483] env[69927]: DEBUG nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 921.107323] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed212391-1a96-46e5-b3d5-fd189e54ef40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.121243] env[69927]: DEBUG nova.network.neutron [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.166712] env[69927]: DEBUG nova.network.neutron [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updating instance_info_cache with network_info: [{"id": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "address": "fa:16:3e:85:c5:49", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95f83c-01", "ovs_interfaceid": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.210237] env[69927]: DEBUG oslo_vmware.api [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Task: {'id': task-4096082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215782} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.210654] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 921.210848] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 921.210921] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 921.211286] env[69927]: INFO nova.compute.manager [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Took 1.16 seconds to destroy the instance on the hypervisor. [ 921.211640] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.216912] env[69927]: DEBUG nova.compute.manager [-] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 921.216912] env[69927]: DEBUG nova.network.neutron [-] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 921.247723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.803s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.248300] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 921.251065] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.431s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.251152] env[69927]: DEBUG nova.objects.instance [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lazy-loading 'resources' on Instance uuid a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.507405] env[69927]: DEBUG nova.network.neutron [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Updating instance_info_cache with network_info: [{"id": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "address": "fa:16:3e:46:d1:03", "network": {"id": "7db2f714-1044-45d6-9e7c-3b7c1524c691", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1880105821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e42b3ace8db6445ab652bc1e2e0a3361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa521cbd-97", "ovs_interfaceid": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.638000] env[69927]: INFO nova.compute.manager [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Took 46.84 seconds to build instance. 
[ 921.672040] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Releasing lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.754524] env[69927]: DEBUG nova.compute.utils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 921.756103] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 921.756103] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 921.856463] env[69927]: DEBUG nova.policy [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96e9df123ad74df4997941b3942e4330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5551a241903f4911b27b7f4ab1c2f29d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 922.011049] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Releasing lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.011407] env[69927]: DEBUG nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Instance network_info: |[{"id": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "address": "fa:16:3e:46:d1:03", "network": {"id": "7db2f714-1044-45d6-9e7c-3b7c1524c691", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1880105821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e42b3ace8db6445ab652bc1e2e0a3361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa521cbd-97", "ovs_interfaceid": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 922.011713] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] Acquired lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.011902] env[69927]: DEBUG nova.network.neutron [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Refreshing network info cache for port fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.013132] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:d1:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa521cbd-9783-45a7-b712-2e14b9bf5139', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.027582] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Creating folder: Project (e42b3ace8db6445ab652bc1e2e0a3361). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.033934] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9441c93b-8471-4560-9747-da5bf198b2f2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.047040] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Created folder: Project (e42b3ace8db6445ab652bc1e2e0a3361) in parent group-v811283. [ 922.047313] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Creating folder: Instances. Parent ref: group-v811461. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.053033] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f53c2cbb-2e99-4a9f-b8e2-253098f854f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.060996] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Created folder: Instances in parent group-v811461. [ 922.061293] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.061495] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.062819] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b4dae73-b793-43a4-8964-0389c073b71c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.087262] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.087262] env[69927]: value = "task-4096085" [ 922.087262] env[69927]: _type = "Task" [ 922.087262] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.096975] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096085, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.123578] env[69927]: DEBUG nova.network.neutron [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance_info_cache with network_info: [{"id": "32049b49-d761-48ff-8938-d76ebe86f62e", "address": "fa:16:3e:64:c8:10", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32049b49-d7", "ovs_interfaceid": "32049b49-d761-48ff-8938-d76ebe86f62e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.139872] env[69927]: DEBUG oslo_concurrency.lockutils [None req-16debb7d-5478-41cf-bd91-d36ea419c7d0 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "9363c664-5848-408b-9b03-2dea4ceded90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.672s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.175616] env[69927]: DEBUG nova.compute.manager [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 922.176749] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f21853a-7577-477e-82dd-d5cb39d7c1e4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.232485] env[69927]: DEBUG nova.network.neutron [-] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.263289] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 922.335128] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.335509] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.352430] env[69927]: DEBUG nova.network.neutron [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Updated VIF entry in instance network info cache for port fa521cbd-9783-45a7-b712-2e14b9bf5139. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.352795] env[69927]: DEBUG nova.network.neutron [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Updating instance_info_cache with network_info: [{"id": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "address": "fa:16:3e:46:d1:03", "network": {"id": "7db2f714-1044-45d6-9e7c-3b7c1524c691", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1880105821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e42b3ace8db6445ab652bc1e2e0a3361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa521cbd-97", "ovs_interfaceid": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.383294] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.383540] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 
tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.451654] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f281d0-bdd6-4a7d-806a-fc378195622d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.460282] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134ea59f-7eaf-45b2-930c-b003f2b67076 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.492658] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f58e90b-b2d1-4c11-a233-d80aa204c205 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.501775] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf3a384-e64b-494d-920a-259162177d30 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.506288] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Successfully created port: e6bbd21d-3bfb-40a1-ab40-d734248c04fb {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.518542] env[69927]: DEBUG nova.compute.provider_tree [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 922.598526] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096085, 'name': CreateVM_Task, 'duration_secs': 0.369331} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.598698] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.599409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.599580] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.599900] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 922.600176] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca2cf6f4-06af-4208-902e-1d7b14a1a092 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.605513] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 922.605513] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e108aa-c8a9-acbc-5f11-823ed4a7f2af" [ 922.605513] env[69927]: _type = "Task" [ 922.605513] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.617196] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e108aa-c8a9-acbc-5f11-823ed4a7f2af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.626569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.645775] env[69927]: DEBUG nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 922.735146] env[69927]: INFO nova.compute.manager [-] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Took 1.52 seconds to deallocate network for instance. [ 922.856672] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1b892f2-d39e-4ef5-a76b-cb9c497e0979 req-6e2edf06-e868-4719-9bfc-d2151ba53d64 service nova] Releasing lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.887837] env[69927]: DEBUG nova.compute.utils [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 923.040081] env[69927]: ERROR nova.scheduler.client.report [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] [req-571c165c-983a-4445-9107-5bf527454145] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-571c165c-983a-4445-9107-5bf527454145"}]} [ 923.059014] env[69927]: DEBUG nova.scheduler.client.report [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 923.076119] env[69927]: DEBUG nova.scheduler.client.report [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 923.076421] env[69927]: DEBUG nova.compute.provider_tree [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 923.089594] env[69927]: DEBUG nova.scheduler.client.report [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 923.114058] env[69927]: DEBUG nova.compute.manager [req-f37af498-504f-43e3-96e4-2e3a31c3d2c9 req-ef3c39b2-5a85-4b86-be97-c480e5bf431d service nova] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Received event network-vif-deleted-f02a1745-61d5-4414-88fe-680d9e7bba72 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 923.115510] env[69927]: DEBUG nova.scheduler.client.report [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 923.124117] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e108aa-c8a9-acbc-5f11-823ed4a7f2af, 'name': SearchDatastore_Task, 'duration_secs': 0.010672} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.124921] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.125307] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.125763] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.126036] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.126305] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.127104] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1289311b-6be3-4f2c-bf99-622f4349d5fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.138957] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.139349] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.142477] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-113134f9-0f67-4f79-9c47-75ddd23215c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.154135] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 923.154135] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524c8197-9e14-7b3d-d096-72d755960111" [ 923.154135] env[69927]: _type = "Task" [ 923.154135] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.162724] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b65a30-e50e-4fce-a2c3-de1a3ccea6b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.170550] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.179353] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524c8197-9e14-7b3d-d096-72d755960111, 'name': SearchDatastore_Task, 'duration_secs': 0.01248} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.198596] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aa3f4f2-afa1-4034-abb0-470b4a39bcf8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.202148] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9c11fa-5f42-4e1e-9215-d688d7825ba9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.208017] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889e41f2-dd36-4cdc-bd1b-3aad12e6eb17 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.215932] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 923.215932] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b342e-e783-71a0-0c33-e225a024b2ae" [ 923.215932] env[69927]: _type = "Task" [ 923.215932] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.221203] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Doing hard reboot of VM {{(pid=69927) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 923.221649] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance 'c3e8a429-8484-4b11-abe3-1cccf0992556' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 923.232250] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-e50cc40b-6a88-444e-9597-5a01c64717a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.241335] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b342e-e783-71a0-0c33-e225a024b2ae, 'name': SearchDatastore_Task, 'duration_secs': 0.013905} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.243109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.243408] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 50eedb80-d4bc-42c4-9686-6549cbd675b7/50eedb80-d4bc-42c4-9686-6549cbd675b7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.244494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.244857] env[69927]: DEBUG oslo_vmware.api [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 923.244857] env[69927]: value = "task-4096086" [ 923.244857] env[69927]: _type = "Task" [ 923.244857] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.248303] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-107c1d3e-7819-422e-b692-c0ec75a508aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.267652] env[69927]: DEBUG oslo_vmware.api [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096086, 'name': ResetVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.273882] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 923.273882] env[69927]: value = "task-4096087" [ 923.273882] env[69927]: _type = "Task" [ 923.273882] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.274539] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 923.288033] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096087, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.311898] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.312325] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.312583] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.312888] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.313097] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.314030] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.314030] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.314030] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 923.314030] env[69927]: DEBUG 
nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.314274] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.314274] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.315231] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b592d485-bed7-496a-9e8e-38fe9e0c7c90 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.325608] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b65c27e-6be3-4ba6-b4fb-43a746c3c01b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.391585] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.737095] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 923.738036] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28797d1c-475a-4426-abc5-cc8236ffdd15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.740932] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1591826-2e58-4565-b074-e63075c02ef6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.750057] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174678fe-4510-4548-b9d2-108991af278c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.754754] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 923.754754] env[69927]: value = "task-4096088" [ 923.754754] env[69927]: _type = "Task" [ 923.754754] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.791418] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95f5cc0-5df2-4e31-904f-454fa6b5e196 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.803868] env[69927]: DEBUG oslo_vmware.api [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096086, 'name': ResetVM_Task, 'duration_secs': 0.154662} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.804135] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096088, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.805138] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Did hard reboot of VM {{(pid=69927) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 923.805392] env[69927]: DEBUG nova.compute.manager [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 923.806248] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f000ada-4fda-489a-8281-cf07329120e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.815325] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096087, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543864} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.815733] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a5c8c6-c580-4c77-be80-f066d361a578 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.822021] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 50eedb80-d4bc-42c4-9686-6549cbd675b7/50eedb80-d4bc-42c4-9686-6549cbd675b7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.822021] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.822680] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2a87c11-2107-499f-a04a-7b7145c400f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.838122] env[69927]: DEBUG nova.compute.provider_tree [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 923.841168] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 923.841168] env[69927]: value = "task-4096089" [ 923.841168] env[69927]: _type = "Task" [ 923.841168] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.851405] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096089, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.256220] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Successfully updated port: e6bbd21d-3bfb-40a1-ab40-d734248c04fb {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.274403] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096088, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.332806] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1a298b-cdf7-4bfe-856b-f9d10870404c tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.060s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.354384] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.24534} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.354615] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.355521] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf0b83f-0cbe-4c84-a3bc-b364ab8bda57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.381026] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 50eedb80-d4bc-42c4-9686-6549cbd675b7/50eedb80-d4bc-42c4-9686-6549cbd675b7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.382136] env[69927]: DEBUG nova.scheduler.client.report [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 924.382482] env[69927]: DEBUG nova.compute.provider_tree [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 89 to 90 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 924.382595] env[69927]: DEBUG nova.compute.provider_tree [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 924.386338] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-175c4403-22a8-466f-b86f-6befe9b65876 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.410963] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 924.410963] env[69927]: value = "task-4096090" [ 924.410963] env[69927]: _type = "Task" [ 924.410963] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.420619] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096090, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.465510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.465886] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.466225] env[69927]: INFO nova.compute.manager [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Attaching volume bcac983c-b893-4914-9a04-f6f06dd0347e to /dev/sdb [ 924.504228] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b923194c-f3d7-484a-b815-3e1ec54056ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.512198] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a76ad50-d51d-46fa-8b85-7f17bb0103bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.527682] env[69927]: DEBUG nova.virt.block_device [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updating existing volume attachment record: 0bf04d48-e4a1-4dcc-be00-08a1dfa034e2 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 924.758957] env[69927]: INFO nova.compute.manager [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Rebuilding instance [ 924.762274] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "refresh_cache-8be7e64c-7bc6-41a0-ada5-0a5057a2af45" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.762274] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "refresh_cache-8be7e64c-7bc6-41a0-ada5-0a5057a2af45" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.762274] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Building 
network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.772563] env[69927]: DEBUG oslo_vmware.api [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096088, 'name': PowerOnVM_Task, 'duration_secs': 0.971766} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.772835] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 924.773049] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39c87b46-9e1d-4bb6-8e9e-8d770817311b tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance 'c3e8a429-8484-4b11-abe3-1cccf0992556' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 924.836613] env[69927]: DEBUG nova.compute.manager [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 924.837644] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36fed75-d186-4059-8ccd-030b6c4a272c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.906310] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.655s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.908716] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.062s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.908914] env[69927]: DEBUG nova.objects.instance [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 924.923336] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096090, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.942936] env[69927]: INFO nova.scheduler.client.report [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Deleted allocations for instance a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5 [ 925.189208] env[69927]: DEBUG nova.compute.manager [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Received event network-vif-plugged-e6bbd21d-3bfb-40a1-ab40-d734248c04fb {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.189548] env[69927]: DEBUG oslo_concurrency.lockutils [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] Acquiring lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.189924] env[69927]: DEBUG oslo_concurrency.lockutils [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.190251] env[69927]: DEBUG oslo_concurrency.lockutils [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.190572] env[69927]: DEBUG nova.compute.manager [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] No waiting events found dispatching network-vif-plugged-e6bbd21d-3bfb-40a1-ab40-d734248c04fb {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 925.190951] env[69927]: WARNING nova.compute.manager [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Received unexpected event network-vif-plugged-e6bbd21d-3bfb-40a1-ab40-d734248c04fb for instance with vm_state building and task_state spawning. [ 925.191290] env[69927]: DEBUG nova.compute.manager [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Received event network-changed-e6bbd21d-3bfb-40a1-ab40-d734248c04fb {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.191577] env[69927]: DEBUG nova.compute.manager [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Refreshing instance network info cache due to event network-changed-e6bbd21d-3bfb-40a1-ab40-d734248c04fb. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 925.191905] env[69927]: DEBUG oslo_concurrency.lockutils [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] Acquiring lock "refresh_cache-8be7e64c-7bc6-41a0-ada5-0a5057a2af45" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.338885] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.427882] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096090, 'name': ReconfigVM_Task, 'duration_secs': 0.709084} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.428217] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 50eedb80-d4bc-42c4-9686-6549cbd675b7/50eedb80-d4bc-42c4-9686-6549cbd675b7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.428850] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd29cf0d-74ed-4d55-b46a-9eba413014fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.437178] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 925.437178] env[69927]: value = "task-4096094" [ 925.437178] env[69927]: _type = "Task" [ 925.437178] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.449691] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096094, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.453517] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b983aff-02fb-45a9-bd9c-9f9041868654 tempest-FloatingIPsAssociationTestJSON-1358918757 tempest-FloatingIPsAssociationTestJSON-1358918757-project-member] Lock "a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.340s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.683084] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Updating instance_info_cache with network_info: [{"id": "e6bbd21d-3bfb-40a1-ab40-d734248c04fb", "address": "fa:16:3e:c4:b7:0c", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6bbd21d-3b", "ovs_interfaceid": "e6bbd21d-3bfb-40a1-ab40-d734248c04fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.855219] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.855552] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bba3ffb-761e-4339-9b5b-4b7f92ca36e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.864475] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 925.864475] env[69927]: value = "task-4096095" [ 925.864475] env[69927]: _type = "Task" [ 925.864475] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.875021] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096095, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.919644] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08f77c24-fdeb-49d8-851f-6472b59a2800 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.923379] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.182s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.923379] env[69927]: DEBUG nova.objects.instance [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'resources' on Instance uuid 66ba8086-2dd4-4d02-aac3-1bbb4a404784 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.949457] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096094, 'name': Rename_Task, 'duration_secs': 0.149355} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.949457] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.949577] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39465dc1-6ad5-42e6-af58-f6d7b68fdf57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.957234] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 925.957234] env[69927]: value = "task-4096096" [ 925.957234] env[69927]: _type = "Task" [ 925.957234] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.966727] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096096, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.187229] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "refresh_cache-8be7e64c-7bc6-41a0-ada5-0a5057a2af45" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.187464] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Instance network_info: |[{"id": "e6bbd21d-3bfb-40a1-ab40-d734248c04fb", "address": "fa:16:3e:c4:b7:0c", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6bbd21d-3b", "ovs_interfaceid": "e6bbd21d-3bfb-40a1-ab40-d734248c04fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 926.187820] env[69927]: DEBUG oslo_concurrency.lockutils [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] Acquired lock "refresh_cache-8be7e64c-7bc6-41a0-ada5-0a5057a2af45" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.187962] env[69927]: DEBUG nova.network.neutron [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Refreshing network info cache for port e6bbd21d-3bfb-40a1-ab40-d734248c04fb {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.189397] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:b7:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a1439ce-fa5c-420d-bcf0-083f4cc002cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6bbd21d-3bfb-40a1-ab40-d734248c04fb', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.199026] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 
tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.202850] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.203524] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7619f99c-db5c-43c7-bf51-1b2e67d96ad7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.227890] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.227890] env[69927]: value = "task-4096097" [ 926.227890] env[69927]: _type = "Task" [ 926.227890] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.239341] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096097, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.376070] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096095, 'name': PowerOffVM_Task, 'duration_secs': 0.196072} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.376336] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 926.377329] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 926.377850] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a7a2cf-16c4-4430-b8e0-33902afbc762 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.388115] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 926.388431] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e3c2999-aaaa-47fc-8fca-9bec99181a7c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.418287] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Unregistered the VM {{(pid=69927) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.418618] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.418907] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Deleting the datastore file [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.419318] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05705e4c-edb9-4868-9ab5-923466a1f6ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.435964] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 926.435964] env[69927]: value = "task-4096099" [ 926.435964] env[69927]: _type = "Task" [ 926.435964] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.461116] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.475259] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096096, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.588516] env[69927]: DEBUG nova.network.neutron [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Updated VIF entry in instance network info cache for port e6bbd21d-3bfb-40a1-ab40-d734248c04fb. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 926.588516] env[69927]: DEBUG nova.network.neutron [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Updating instance_info_cache with network_info: [{"id": "e6bbd21d-3bfb-40a1-ab40-d734248c04fb", "address": "fa:16:3e:c4:b7:0c", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6bbd21d-3b", "ovs_interfaceid": "e6bbd21d-3bfb-40a1-ab40-d734248c04fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.742760] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096097, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.948118] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311954} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.948118] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.948118] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.948754] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.975031] env[69927]: DEBUG oslo_vmware.api [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096096, 'name': PowerOnVM_Task, 'duration_secs': 0.708597} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.977443] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 926.977892] env[69927]: INFO nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 926.978314] env[69927]: DEBUG nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 926.979641] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269684bf-2137-49b3-8aef-2df649b8c4df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.078846] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a42d7e-6316-41ad-9778-26b23af14b6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.087033] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adff54bb-67a2-4f5e-ab9d-1025c1022fea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.124739] env[69927]: DEBUG oslo_concurrency.lockutils [req-b208d32c-ec8a-4140-ad90-29cc6689498b req-0568f05c-1e8c-42ac-bf8e-1637ec47e1f8 service nova] Releasing lock "refresh_cache-8be7e64c-7bc6-41a0-ada5-0a5057a2af45" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.125403] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "e1946033-4ec3-4561-afdf-a3b748f7c611" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.126022] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.126330] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "e1946033-4ec3-4561-afdf-a3b748f7c611-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.126553] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.126773] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock 
"e1946033-4ec3-4561-afdf-a3b748f7c611-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.129992] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593c163b-26ff-4014-88e1-87d7026003fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.136022] env[69927]: INFO nova.compute.manager [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Terminating instance [ 927.146150] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9627351f-18d6-4139-8056-70fa3a57d9a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.167090] env[69927]: DEBUG nova.compute.provider_tree [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 927.239924] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096097, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.280226] env[69927]: DEBUG nova.network.neutron [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Port 32049b49-d761-48ff-8938-d76ebe86f62e binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 927.280515] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.280667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.280833] env[69927]: DEBUG nova.network.neutron [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.508688] env[69927]: INFO nova.compute.manager [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Took 48.06 seconds to build instance. [ 927.632170] env[69927]: DEBUG nova.compute.manager [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Received event network-changed-6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.633127] env[69927]: DEBUG nova.compute.manager [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Refreshing instance network info cache due to event network-changed-6e95f83c-01f6-4bbe-800b-ed805d60f684. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 927.633127] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] Acquiring lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.633127] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] Acquired lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.633127] env[69927]: DEBUG nova.network.neutron [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Refreshing network info cache for port 6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.644454] env[69927]: DEBUG nova.compute.manager [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 927.644454] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.644454] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5c28cb-a338-4c08-a791-e2c64435362a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.655178] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.655393] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfa151f1-5f1a-4d4b-b210-686fd9b7b308 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.662607] env[69927]: DEBUG oslo_vmware.api [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 927.662607] env[69927]: value = "task-4096101" [ 927.662607] env[69927]: _type = "Task" [ 927.662607] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.675565] env[69927]: DEBUG oslo_vmware.api [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096101, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.709427] env[69927]: DEBUG nova.scheduler.client.report [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 927.709741] env[69927]: DEBUG nova.compute.provider_tree [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 90 to 91 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 927.709935] env[69927]: DEBUG nova.compute.provider_tree [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 927.741644] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096097, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.004219] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 928.004482] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.004615] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 928.004797] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.004941] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 928.005098] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 928.005311] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 928.005609] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 928.005832] env[69927]: DEBUG 
nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 928.009015] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 928.009015] env[69927]: DEBUG nova.virt.hardware [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 928.009015] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a460bab9-0024-48e8-887e-7942b44bab65 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.012532] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d4deab20-5b5c-4283-ab60-41cbaf353a32 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.740s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.020523] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f4b073-79d2-4930-aa6c-56e70ec2135f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.037066] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.042529] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.045828] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.046150] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e78b35e1-5518-4e90-8f09-d159eb928e8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.071205] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.071205] env[69927]: value = "task-4096102" [ 928.071205] env[69927]: _type = "Task" [ 928.071205] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.083428] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096102, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.153581] env[69927]: DEBUG nova.network.neutron [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance_info_cache with network_info: [{"id": "32049b49-d761-48ff-8938-d76ebe86f62e", "address": "fa:16:3e:64:c8:10", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32049b49-d7", "ovs_interfaceid": "32049b49-d761-48ff-8938-d76ebe86f62e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.177623] env[69927]: DEBUG oslo_vmware.api [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096101, 'name': PowerOffVM_Task, 'duration_secs': 0.270492} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.177910] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 928.178093] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 928.178386] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba036808-11cc-404a-b5cf-70d7e30b8ee0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.216160] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.295s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.219206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.824s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.221231] env[69927]: INFO nova.compute.claims [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.241256] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096097, 'name': CreateVM_Task, 'duration_secs': 1.91996} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.241432] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.242134] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.243382] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.243382] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 928.243382] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a6f6971-0571-4336-b217-e1826ca150ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.250870] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 928.250870] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5205ef71-9bb3-9511-30bd-5db8ca2bb158" [ 928.250870] env[69927]: _type = "Task" [ 928.250870] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.259170] env[69927]: INFO nova.scheduler.client.report [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted allocations for instance 66ba8086-2dd4-4d02-aac3-1bbb4a404784 [ 928.264316] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5205ef71-9bb3-9511-30bd-5db8ca2bb158, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.283305] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 928.283593] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 928.283898] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Deleting the datastore file [datastore2] e1946033-4ec3-4561-afdf-a3b748f7c611 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.284269] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b37fc88-0b53-4b51-a666-a839ebed3c21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.298086] env[69927]: DEBUG oslo_vmware.api [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 928.298086] env[69927]: value = "task-4096104" [ 928.298086] env[69927]: _type = "Task" [ 928.298086] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.311756] env[69927]: DEBUG oslo_vmware.api [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096104, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.480103] env[69927]: DEBUG nova.network.neutron [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updated VIF entry in instance network info cache for port 6e95f83c-01f6-4bbe-800b-ed805d60f684. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.480504] env[69927]: DEBUG nova.network.neutron [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updating instance_info_cache with network_info: [{"id": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "address": "fa:16:3e:85:c5:49", "network": {"id": "8b41ee82-5412-4d31-ae76-47e8663487eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-523653041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66cbe813421e40d1bd515411bc3c045a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ffb921-796a-40fe-9662-d3fc01547dcb", "external-id": "nsx-vlan-transportzone-331", "segmentation_id": 331, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e95f83c-01", "ovs_interfaceid": "6e95f83c-01f6-4bbe-800b-ed805d60f684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.517139] env[69927]: DEBUG nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 928.582751] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096102, 'name': CreateVM_Task, 'duration_secs': 0.366699} completed successfully. 
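The instance_info_cache entry above is stored as a plain list of VIF dictionaries, so its fields can be read with ordinary dict access. A short sketch using values copied from the cached entry above, trimmed to the fields plug/unplug code usually needs:

    # Trimmed copy of the cached VIF entry logged above.
    vif = {
        "id": "6e95f83c-01f6-4bbe-800b-ed805d60f684",
        "address": "fa:16:3e:85:c5:49",
        "type": "ovs",
        "devname": "tap6e95f83c-01",
        "ovs_interfaceid": "6e95f83c-01f6-4bbe-800b-ed805d60f684",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.4", "floating_ips": []}],
            }],
            "meta": {"mtu": 8950, "physical_network": "default"},
        },
    }

    mac = vif["address"]
    bridge = vif["network"]["bridge"]
    mtu = vif["network"]["meta"]["mtu"]
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(mac, bridge, mtu, fixed_ips)  # fa:16:3e:85:c5:49 br-int 8950 ['192.168.128.4']
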
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.582927] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.583400] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.657069] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.762816] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5205ef71-9bb3-9511-30bd-5db8ca2bb158, 'name': SearchDatastore_Task, 'duration_secs': 0.014224} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.763209] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.763393] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.763598] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.763749] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.763929] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.764252] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.764681] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 928.766065] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18428db5-ac5b-4d01-b240-daa7095a3139 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.767124] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68b4a439-cc17-4a89-b3fa-eebf35691600 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.775157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a0693cd8-1db0-4382-b1a9-a29ee2429633 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "66ba8086-2dd4-4d02-aac3-1bbb4a404784" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 43.161s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.782401] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 928.782401] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5237e78c-24df-8dff-3b7d-7c7403e297c8" [ 928.782401] env[69927]: _type = "Task" [ 928.782401] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.787025] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.787025] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.790909] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85268e09-5a7f-4a7b-bd01-03974ef3d031 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.808927] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5237e78c-24df-8dff-3b7d-7c7403e297c8, 'name': SearchDatastore_Task, 'duration_secs': 0.021085} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.809325] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 928.809325] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f4ff22-fee7-e8b9-9756-e7e1a5365c34" [ 928.809325] env[69927]: _type = "Task" [ 928.809325] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.810040] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.810306] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.810552] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.818609] env[69927]: DEBUG oslo_vmware.api [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096104, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319778} completed successfully. 
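The "Acquiring lock", "Acquired lock", "Acquired external semaphore" and "Releasing lock" records around the image-cache entry come from oslo.concurrency's lockutils: an in-process lock plus a file-based lock, so only one worker fetches or copies a given cached image at a time. A minimal sketch of the same pattern; the lock path and the body of the critical section are placeholders.

    # Sketch: serialize work on one image-cache entry, as the lockutils records
    # above do. lock_path and the critical section are placeholders (assumptions).
    from oslo_concurrency import lockutils

    CACHE_KEY = ("[datastore1] devstack-image-cache_base/"
                 "f524494e-9179-4b3e-a3e2-782f019def24")

    # external=True adds the file-based "external semaphore" seen in the log, so
    # other nova-compute processes on the same host are excluded as well.
    with lockutils.lock(CACHE_KEY, lock_file_prefix='nova',
                        external=True, lock_path='/tmp'):
        pass  # fetch or copy the cached VMDK while holding the lock
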
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.819360] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.819539] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.819736] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.819930] env[69927]: INFO nova.compute.manager [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Took 1.18 seconds to destroy the instance on the hypervisor. [ 928.820256] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.820431] env[69927]: DEBUG nova.compute.manager [-] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 928.820487] env[69927]: DEBUG nova.network.neutron [-] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.826025] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f4ff22-fee7-e8b9-9756-e7e1a5365c34, 'name': SearchDatastore_Task, 'duration_secs': 0.010826} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.827542] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb183477-9530-4d95-a5be-fd73a361631f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.833908] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 928.833908] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cec3d4-dffd-3d85-0efa-defacc4a8e49" [ 928.833908] env[69927]: _type = "Task" [ 928.833908] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.842608] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cec3d4-dffd-3d85-0efa-defacc4a8e49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.983328] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d31f31b-e4b0-4777-a532-292475d7a969 req-c8942e57-e4cf-4428-8b0e-788e04b4fd1a service nova] Releasing lock "refresh_cache-e1946033-4ec3-4561-afdf-a3b748f7c611" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.051231] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.160552] env[69927]: DEBUG nova.compute.manager [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69927) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:900}} [ 929.160725] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.349861] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cec3d4-dffd-3d85-0efa-defacc4a8e49, 'name': SearchDatastore_Task, 'duration_secs': 0.014217} completed successfully. 
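The "Waiting for function ... _deallocate_network_with_retries to return" record above is oslo.service's looping-call machinery retrying network deallocation until it succeeds. A minimal sketch of that retry shape; the deallocate_network() stub below stands in for the real Neutron call and simply fails twice before succeeding.

    # Sketch: retry an operation on a fixed interval until it succeeds, the way
    # the _deallocate_network_with_retries looping call logged above does.
    from oslo_service import loopingcall

    attempts = {'count': 0}

    def deallocate_network():
        attempts['count'] += 1
        if attempts['count'] < 3:
            raise RuntimeError('neutron not ready yet')  # fake transient failure

    def _deallocate_with_retries():
        try:
            deallocate_network()
        except RuntimeError:
            return                              # try again on the next interval
        raise loopingcall.LoopingCallDone()     # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1).wait()              # "Waiting for function ... to return"
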
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.353079] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.353589] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8be7e64c-7bc6-41a0-ada5-0a5057a2af45/8be7e64c-7bc6-41a0-ada5-0a5057a2af45.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.354306] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.354672] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.355056] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72114be1-9705-454e-bac4-875edb62a455 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.358217] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b843cddd-e363-4ff4-9e87-985372c4cfbb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.372201] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 929.372201] env[69927]: value = "task-4096105" [ 929.372201] env[69927]: _type = "Task" [ 929.372201] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.372489] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.373099] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.373744] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4c18782-181a-4e0c-aa5d-a6f8c288ed2e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.387648] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 929.387648] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e5479-9649-dcf8-ef63-2d461d513e05" [ 929.387648] env[69927]: _type = "Task" [ 929.387648] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.391286] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096105, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.401788] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e5479-9649-dcf8-ef63-2d461d513e05, 'name': SearchDatastore_Task, 'duration_secs': 0.011588} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.402826] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac93a49a-b3b5-4274-987a-8d9c33c5ed13 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.409937] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 929.409937] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0c348-7685-7fb7-0061-5e82732e3911" [ 929.409937] env[69927]: _type = "Task" [ 929.409937] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.422955] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0c348-7685-7fb7-0061-5e82732e3911, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.535415] env[69927]: DEBUG nova.compute.manager [req-bd842853-7031-440d-a4b1-aac2808788f0 req-98b20753-be93-49e6-a257-6bd4e7c9b029 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Received event network-vif-deleted-6e95f83c-01f6-4bbe-800b-ed805d60f684 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.535606] env[69927]: INFO nova.compute.manager [req-bd842853-7031-440d-a4b1-aac2808788f0 req-98b20753-be93-49e6-a257-6bd4e7c9b029 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Neutron deleted interface 6e95f83c-01f6-4bbe-800b-ed805d60f684; detaching it from the instance and deleting it from the info cache [ 929.535793] env[69927]: DEBUG nova.network.neutron [req-bd842853-7031-440d-a4b1-aac2808788f0 req-98b20753-be93-49e6-a257-6bd4e7c9b029 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.588263] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 929.588942] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811465', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'name': 'volume-bcac983c-b893-4914-9a04-f6f06dd0347e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e1b3d0bc-a251-4dbd-89a6-216a2f2c1313', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'serial': 'bcac983c-b893-4914-9a04-f6f06dd0347e'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 929.592045] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72a3ac8-b1ee-49a7-864f-e08156851adf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.610071] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f1fb0a-47cb-4672-9bc8-2aed01e2ff62 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.638028] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] volume-bcac983c-b893-4914-9a04-f6f06dd0347e/volume-bcac983c-b893-4914-9a04-f6f06dd0347e.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 929.641312] env[69927]: DEBUG nova.network.neutron [-] [instance: 
e1946033-4ec3-4561-afdf-a3b748f7c611] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.642452] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9a3842f-14e9-4a0b-b3c6-6a48f7f97eb9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.663823] env[69927]: DEBUG oslo_vmware.api [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 929.663823] env[69927]: value = "task-4096106" [ 929.663823] env[69927]: _type = "Task" [ 929.663823] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.677344] env[69927]: DEBUG oslo_vmware.api [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096106, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.885018] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096105, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.888603] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506f49a7-317c-4287-83a0-7df6d74d8f86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.900558] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c184d075-f5f4-4252-8941-82165641f48e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.940198] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defb01b7-4637-4009-a894-06828d7f58e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.951422] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0c348-7685-7fb7-0061-5e82732e3911, 'name': SearchDatastore_Task, 'duration_secs': 0.013228} completed successfully. 
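The AttachVolumeNegativeTest records above attach an existing Cinder volume's VMDK by reconfiguring the VM ("Reconfiguring VM instance instance-0000002a to attach disk ... with type thin"). A rough sketch of the ReconfigVM_Task call behind those lines; session is assumed to be the VMwareAPISession from the earlier sketch, and vm_ref, controller_key and unit_number are assumed to have been resolved from the VM's current hardware.

    # Sketch: attach an existing VMDK to a VM via ReconfigVM_Task. All arguments
    # are assumed to be resolved elsewhere; only the call structure is shown.
    def attach_existing_vmdk(session, vm_ref, controller_key, unit_number, vmdk_path):
        cf = session.vim.client.factory

        backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.diskMode = 'persistent'
        backing.thinProvisioned = True          # "with type thin" in the log
        backing.fileName = vmdk_path            # e.g. the volume-...vmdk path above

        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key     # the VM's SCSI controller
        disk.unitNumber = unit_number           # a free unit on that controller
        disk.key = -100                         # temporary key for a new device
        disk.capacityInKB = 0                   # size comes from the backing file

        device_spec = cf.create('ns0:VirtualDeviceConfigSpec')
        device_spec.operation = 'add'
        device_spec.device = disk

        config_spec = cf.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [device_spec]

        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)             # the ReconfigVM_Task progress lines
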
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.954010] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.954288] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.954610] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-983b8fa9-2c5d-4b9a-9b8e-07b07aa12953 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.958072] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d467c32-2140-429c-8d55-b509a0a4bdcd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.978715] env[69927]: DEBUG nova.compute.provider_tree [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 929.981251] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 929.981251] env[69927]: value = "task-4096107" [ 929.981251] env[69927]: _type = "Task" [ 929.981251] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.033503] env[69927]: DEBUG nova.compute.manager [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Received event network-changed-fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.034340] env[69927]: DEBUG nova.compute.manager [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Refreshing instance network info cache due to event network-changed-fa521cbd-9783-45a7-b712-2e14b9bf5139. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 930.034340] env[69927]: DEBUG oslo_concurrency.lockutils [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] Acquiring lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.035133] env[69927]: DEBUG oslo_concurrency.lockutils [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] Acquired lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.035133] env[69927]: DEBUG nova.network.neutron [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Refreshing network info cache for port fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.045163] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0548e1d-c8c3-471d-96c6-939c79978dcc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.056410] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc286d-b3f6-4d5e-8f0b-4ba01169aaa8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.079032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72639dea-3299-475d-b48a-8e1783bc1cf5 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "2ab21cdb-165b-4b71-a865-1a72cfb430c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.079505] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72639dea-3299-475d-b48a-8e1783bc1cf5 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2ab21cdb-165b-4b71-a865-1a72cfb430c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.102947] env[69927]: DEBUG nova.compute.manager [req-bd842853-7031-440d-a4b1-aac2808788f0 req-98b20753-be93-49e6-a257-6bd4e7c9b029 service nova] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Detach interface failed, port_id=6e95f83c-01f6-4bbe-800b-ed805d60f684, reason: Instance e1946033-4ec3-4561-afdf-a3b748f7c611 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 930.157561] env[69927]: INFO nova.compute.manager [-] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Took 1.34 seconds to deallocate network for instance. [ 930.174368] env[69927]: DEBUG oslo_vmware.api [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096106, 'name': ReconfigVM_Task} progress is 14%. 
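The req-bd842853/req-38273eb1 records are nova-compute handling Neutron's external events (network-vif-deleted, network-changed), which Neutron delivers through Nova's os-server-external-events API. A sketch of that REST call; the endpoint and token are placeholders, while the server and port UUIDs are the ones from the vif-deleted event above.

    # Sketch: the external-event POST behind "Received event network-vif-deleted-...".
    # Endpoint and token are placeholders; the UUIDs are taken from the log above.
    import requests

    NOVA = 'http://controller.example.test/compute/v2.1'
    HEADERS = {'X-Auth-Token': '<service token>'}

    body = {'events': [{
        'name': 'network-vif-deleted',
        'server_uuid': 'e1946033-4ec3-4561-afdf-a3b748f7c611',
        'tag': '6e95f83c-01f6-4bbe-800b-ed805d60f684',   # the deleted port
    }]}

    resp = requests.post(NOVA + '/os-server-external-events',
                         json=body, headers=HEADERS)
    print(resp.status_code, resp.json())
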
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.386676] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096105, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728688} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.386676] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8be7e64c-7bc6-41a0-ada5-0a5057a2af45/8be7e64c-7bc6-41a0-ada5-0a5057a2af45.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.386849] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.387565] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b225956-a633-4cf1-b07d-c0658172bcc1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.396210] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 930.396210] env[69927]: value = "task-4096108" [ 930.396210] env[69927]: _type = "Task" [ 930.396210] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.406909] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096108, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.495618] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096107, 'name': CopyVirtualDisk_Task} progress is 4%. 
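The CopyVirtualDisk_Task and ExtendVirtualDisk_Task records above are the image-cache path for instance 8be7e64c-7bc6-41a0-ada5-0a5057a2af45: copy the cached image VMDK into the instance folder, then grow the root disk (to 1048576 KB here). A condensed sketch of those two VirtualDiskManager tasks; session is the VMwareAPISession from the earlier sketch and dc_ref is the datacenter moref, both assumed.

    # Sketch: the two VirtualDiskManager tasks from the records above. session and
    # dc_ref are assumed to exist; the datastore paths mirror the ones in the log.
    def copy_and_extend_root_disk(session, dc_ref,
                                  cached_vmdk, instance_vmdk, new_capacity_kb):
        vdm = session.vim.service_content.virtualDiskManager

        copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                       sourceName=cached_vmdk,
                                       sourceDatacenter=dc_ref,
                                       destName=instance_vmdk,
                                       destDatacenter=dc_ref)
        session.wait_for_task(copy_task)

        extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                                         name=instance_vmdk,
                                         datacenter=dc_ref,
                                         newCapacityKb=new_capacity_kb,  # 1048576 above
                                         eagerZero=False)
        session.wait_for_task(extend_task)
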
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.501755] env[69927]: ERROR nova.scheduler.client.report [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [req-19f630b1-c8be-454a-9ce2-93470c9af99b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-19f630b1-c8be-454a-9ce2-93470c9af99b"}]} [ 930.522140] env[69927]: DEBUG nova.scheduler.client.report [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 930.540972] env[69927]: DEBUG nova.scheduler.client.report [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 930.541123] env[69927]: DEBUG nova.compute.provider_tree [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.553829] env[69927]: DEBUG nova.scheduler.client.report [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 930.591460] env[69927]: DEBUG nova.scheduler.client.report [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 
tempest-MultipleCreateTestJSON-1096518746-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 930.670651] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.680472] env[69927]: DEBUG oslo_vmware.api [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096106, 'name': ReconfigVM_Task, 'duration_secs': 1.011609} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.683693] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Reconfigured VM instance instance-0000002a to attach disk [datastore1] volume-bcac983c-b893-4914-9a04-f6f06dd0347e/volume-bcac983c-b893-4914-9a04-f6f06dd0347e.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.689052] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3c77b8c-88f8-427c-b2c0-47197a417c58 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.708522] env[69927]: DEBUG oslo_vmware.api [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 930.708522] env[69927]: value = "task-4096109" [ 930.708522] env[69927]: _type = "Task" [ 930.708522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.721203] env[69927]: DEBUG oslo_vmware.api [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096109, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.840079] env[69927]: DEBUG nova.network.neutron [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Updated VIF entry in instance network info cache for port fa521cbd-9783-45a7-b712-2e14b9bf5139. 
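The ERROR at [ 930.501755] is placement's optimistic concurrency check: the PUT carried a stale resource_provider_generation, so it was rejected with code placement.concurrent_update, and the report client reacted by refreshing inventories, aggregates and traits before retrying, which is what the surrounding records show. A sketch of that read-refresh-retry loop against the placement REST API; the endpoint, token and microversion are placeholders, while the provider UUID is the one from the log.

    # Sketch: update a resource provider's inventory, retrying on the
    # placement.concurrent_update generation conflict seen in the log.
    # Endpoint, token and microversion are placeholders (assumptions).
    import requests

    PLACEMENT = 'http://controller.example.test/placement'
    HEADERS = {'X-Auth-Token': '<service token>',
               'OpenStack-API-Version': 'placement 1.26'}
    RP = '2f529b36-df5f-4b37-8103-68f74f737726'
    URL = f'{PLACEMENT}/resource_providers/{RP}/inventories'

    def put_inventories(inventories, retries=3):
        for _ in range(retries):
            current = requests.get(URL, headers=HEADERS).json()
            body = {'resource_provider_generation':
                        current['resource_provider_generation'],
                    'inventories': inventories}
            resp = requests.put(URL, json=body, headers=HEADERS)
            if resp.status_code != 409:
                return resp
            errors = resp.json().get('errors', [])
            if not any(e.get('code') == 'placement.concurrent_update'
                       for e in errors):
                return resp
            # another writer bumped the generation; loop to refresh and retry
        return resp
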
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.840487] env[69927]: DEBUG nova.network.neutron [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Updating instance_info_cache with network_info: [{"id": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "address": "fa:16:3e:46:d1:03", "network": {"id": "7db2f714-1044-45d6-9e7c-3b7c1524c691", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1880105821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e42b3ace8db6445ab652bc1e2e0a3361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa521cbd-97", "ovs_interfaceid": "fa521cbd-9783-45a7-b712-2e14b9bf5139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.908849] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096108, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.241392} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.911509] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.912766] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51dea32-a2c1-4908-ad57-941d9f81d1f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.936828] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 8be7e64c-7bc6-41a0-ada5-0a5057a2af45/8be7e64c-7bc6-41a0-ada5-0a5057a2af45.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.941040] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8b5d1ed-0e6b-43d7-bb1b-257fa6cd13f1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.962622] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 930.962622] env[69927]: value = "task-4096110" [ 930.962622] env[69927]: _type = "Task" [ 930.962622] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.974258] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.995818] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.986275} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.998251] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.998472] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.998917] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9c635db-f746-486c-bd22-b791aa031dc7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.006356] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 931.006356] env[69927]: value = "task-4096111" [ 931.006356] env[69927]: _type = "Task" [ 931.006356] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.018043] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096111, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.098786] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-ff227e07-8e36-48d6-a8c7-1e0087fd1faa-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.099054] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-ff227e07-8e36-48d6-a8c7-1e0087fd1faa-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.099405] env[69927]: DEBUG nova.objects.instance [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'flavor' on Instance uuid ff227e07-8e36-48d6-a8c7-1e0087fd1faa {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.142736] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b303bfa7-d5d9-424e-82d5-f4e415d4d837 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.150422] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b636fb-dfe0-4da0-82a2-a421d7b5873b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.188134] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d956d757-01b1-4b51-bfd2-ec531274af6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.196873] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8488c2-8770-4716-b0f5-6830c1334e64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.215283] env[69927]: DEBUG nova.compute.provider_tree [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.225498] env[69927]: DEBUG oslo_vmware.api [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096109, 'name': ReconfigVM_Task, 'duration_secs': 0.240994} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.226544] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811465', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'name': 'volume-bcac983c-b893-4914-9a04-f6f06dd0347e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e1b3d0bc-a251-4dbd-89a6-216a2f2c1313', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'serial': 'bcac983c-b893-4914-9a04-f6f06dd0347e'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 931.347400] env[69927]: DEBUG oslo_concurrency.lockutils [req-38273eb1-43c0-4ed1-a923-9f76c1190fca req-8310e65f-c124-402b-be30-6d97c1db0357 service nova] Releasing lock "refresh_cache-50eedb80-d4bc-42c4-9686-6549cbd675b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.472764] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.518389] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069626} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.518694] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.519578] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d10916-e0e9-4376-a4bb-c3993e318e99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.541182] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.541531] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1eb8734c-d380-4308-9791-d647a88b4725 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.562970] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 931.562970] env[69927]: value = "task-4096112" [ 931.562970] env[69927]: _type = "Task" [ 931.562970] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.571311] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096112, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.670418] env[69927]: DEBUG nova.objects.instance [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'pci_requests' on Instance uuid ff227e07-8e36-48d6-a8c7-1e0087fd1faa {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.720852] env[69927]: DEBUG nova.scheduler.client.report [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.848732] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.848985] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.974760] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096110, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.075277] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096112, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.173763] env[69927]: DEBUG nova.objects.base [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 932.174162] env[69927]: DEBUG nova.network.neutron [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.226687] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.008s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.227265] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 932.229902] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.513s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.230160] env[69927]: DEBUG nova.objects.instance [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lazy-loading 'resources' on Instance uuid b67630a4-2c1a-440b-af82-80c908ffa6e9 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.240086] env[69927]: DEBUG nova.policy [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 932.270662] env[69927]: DEBUG nova.objects.instance [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'flavor' on Instance uuid e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.473586] env[69927]: DEBUG oslo_vmware.api [None 
req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096110, 'name': ReconfigVM_Task, 'duration_secs': 1.023442} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.473921] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 8be7e64c-7bc6-41a0-ada5-0a5057a2af45/8be7e64c-7bc6-41a0-ada5-0a5057a2af45.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.474565] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0979be0-f523-4c09-a6fc-1164c52769aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.481230] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 932.481230] env[69927]: value = "task-4096113" [ 932.481230] env[69927]: _type = "Task" [ 932.481230] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.489821] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096113, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.546640] env[69927]: DEBUG nova.network.neutron [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Successfully created port: 7c2fe55b-b50d-414d-bc2e-984a899ad2e4 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.574828] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096112, 'name': ReconfigVM_Task, 'duration_secs': 0.5658} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.575125] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90/9363c664-5848-408b-9b03-2dea4ceded90.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.575743] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82aa9bbd-7495-4074-8aec-38ad4be366ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.582522] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 932.582522] env[69927]: value = "task-4096114" [ 932.582522] env[69927]: _type = "Task" [ 932.582522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.591369] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096114, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.734031] env[69927]: DEBUG nova.compute.utils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 932.738622] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 932.738803] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.775820] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0fb60ab6-5646-4591-9454-29b0de24efc2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.310s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.817627] env[69927]: DEBUG nova.policy [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96e9df123ad74df4997941b3942e4330', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5551a241903f4911b27b7f4ab1c2f29d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 932.991571] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096113, 'name': Rename_Task, 'duration_secs': 0.237574} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.991902] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.992343] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fd5dd64-f3a3-42ec-b3c3-59b0b3110538 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.001026] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 933.001026] env[69927]: value = "task-4096115" [ 933.001026] env[69927]: _type = "Task" [ 933.001026] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.013082] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096115, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.093723] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096114, 'name': Rename_Task, 'duration_secs': 0.169418} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.096606] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.097119] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4eec91c-5522-45ca-a763-dce5a51acb2e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.105831] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 933.105831] env[69927]: value = "task-4096116" [ 933.105831] env[69927]: _type = "Task" [ 933.105831] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.124925] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.220196] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Successfully created port: 0b58ad52-c0bc-42d5-8d95-358aed165658 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.239865] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 933.334071] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd85d906-c179-437b-b4ce-f62098066243 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.344858] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5064f7-0219-4b0a-826d-0d0e3a6a4332 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.378275] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaa7a2e-da4b-425f-a0a2-bb9376b919af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.386694] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e86e8b1-619e-41b5-a204-1ed039c86885 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.401603] env[69927]: DEBUG nova.compute.provider_tree [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.517418] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096115, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.616205] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096116, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.905054] env[69927]: DEBUG nova.scheduler.client.report [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 934.012638] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096115, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.088481] env[69927]: DEBUG nova.network.neutron [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Successfully updated port: 7c2fe55b-b50d-414d-bc2e-984a899ad2e4 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 934.124291] env[69927]: DEBUG oslo_vmware.api [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096116, 'name': PowerOnVM_Task, 'duration_secs': 0.88449} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.124717] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.125046] env[69927]: DEBUG nova.compute.manager [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 934.126502] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c760b65-e56c-41ed-b066-3a1ba26d6014 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.251317] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 934.280308] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 934.280606] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None 
req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 934.282252] env[69927]: DEBUG nova.virt.hardware [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 934.283388] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4f1a9f-0253-471e-b98d-d5bea2bc15e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.294380] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dde4ca0-2aa0-4a88-a9b4-e17ca31a6d5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.410819] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.181s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.413257] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.280s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.413617] env[69927]: DEBUG nova.objects.instance [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lazy-loading 'resources' on Instance uuid 5f67d6a0-e4b7-435e-8991-0f54e0379d22 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.434795] env[69927]: INFO nova.scheduler.client.report [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted allocations for instance b67630a4-2c1a-440b-af82-80c908ffa6e9 [ 934.513009] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096115, 'name': PowerOnVM_Task, 'duration_secs': 1.365618} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.513321] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.513528] env[69927]: INFO nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Took 11.24 seconds to spawn the instance on the hypervisor. [ 934.513942] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 934.514812] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac30950-8904-4c94-b4d1-bd9d0bcea5fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.591071] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.591342] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.591467] env[69927]: DEBUG nova.network.neutron [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.644634] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.944542] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aabfc2b-4378-470b-8b13-822f0f1bcd06 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "b67630a4-2c1a-440b-af82-80c908ffa6e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.439s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.956792] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 
tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Successfully updated port: 0b58ad52-c0bc-42d5-8d95-358aed165658 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 934.965240] env[69927]: DEBUG nova.compute.manager [req-5bf75319-c012-4c0e-b27e-535e9dd3ed74 req-defb1cf9-6e2c-401d-8680-d40794a7a590 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received event network-vif-plugged-7c2fe55b-b50d-414d-bc2e-984a899ad2e4 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 934.965240] env[69927]: DEBUG oslo_concurrency.lockutils [req-5bf75319-c012-4c0e-b27e-535e9dd3ed74 req-defb1cf9-6e2c-401d-8680-d40794a7a590 service nova] Acquiring lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.965605] env[69927]: DEBUG oslo_concurrency.lockutils [req-5bf75319-c012-4c0e-b27e-535e9dd3ed74 req-defb1cf9-6e2c-401d-8680-d40794a7a590 service nova] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.965965] env[69927]: DEBUG oslo_concurrency.lockutils [req-5bf75319-c012-4c0e-b27e-535e9dd3ed74 req-defb1cf9-6e2c-401d-8680-d40794a7a590 service nova] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.966307] env[69927]: DEBUG nova.compute.manager [req-5bf75319-c012-4c0e-b27e-535e9dd3ed74 req-defb1cf9-6e2c-401d-8680-d40794a7a590 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] No waiting events found dispatching network-vif-plugged-7c2fe55b-b50d-414d-bc2e-984a899ad2e4 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 934.967115] env[69927]: WARNING nova.compute.manager [req-5bf75319-c012-4c0e-b27e-535e9dd3ed74 req-defb1cf9-6e2c-401d-8680-d40794a7a590 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received unexpected event network-vif-plugged-7c2fe55b-b50d-414d-bc2e-984a899ad2e4 for instance with vm_state active and task_state None. [ 935.035585] env[69927]: INFO nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Took 52.44 seconds to build instance. 
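Note on the recurring "Waiting for the task: (returnval)" / "progress is N%" / "completed successfully" entries above: they come from oslo.vmware's task-polling helper. The driver submits an asynchronous vCenter task (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) and then blocks on VMwareAPISession.wait_for_task(), which polls the task object until it reaches a terminal state. The sketch below is a minimal illustration of that pattern only, not Nova's actual call sites; the endpoint, credentials, and vm_ref value are placeholder assumptions.

    # Illustrative sketch only: submitting and awaiting a vCenter task with
    # oslo.vmware, mirroring the PowerOnVM_Task entries in the log above.
    # Host, credentials and the managed-object ID are placeholder assumptions.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'password',    # placeholder endpoint/credentials
        10,                                        # api_retry_count
        0.5)                                       # task_poll_interval (seconds)

    # Look up a VM by a vCenter managed-object ID (placeholder value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Submit the asynchronous task; vCenter returns a Task managed object.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task (the "_poll_task ... progress is N%" lines)
    # and returns its TaskInfo once it completes, or raises on error/cancel.
    task_info = session.wait_for_task(task)
    print(task_info.state)   # e.g. 'success'
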
[ 935.127629] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.128047] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.132938] env[69927]: WARNING nova.network.neutron [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] b8b342c3-e0d7-4186-9541-03e865142f8a already exists in list: networks containing: ['b8b342c3-e0d7-4186-9541-03e865142f8a']. ignoring it [ 935.443110] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817d2bce-e7c5-4a9b-887b-208f8570b253 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.452216] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20c1dc0-3fd4-4629-a392-1b1eadd57c6f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.486193] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "refresh_cache-358ecaef-37f0-42be-acce-00f389650c97" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.489200] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "refresh_cache-358ecaef-37f0-42be-acce-00f389650c97" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.489200] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.489200] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c017537-2f4a-4435-957a-552e5911f4c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.498473] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8ef56d-8bc6-4b86-9ab2-1f49a8814715 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.513177] env[69927]: DEBUG nova.compute.provider_tree [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.536591] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.080s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.792290] env[69927]: DEBUG nova.network.neutron [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7c2fe55b-b50d-414d-bc2e-984a899ad2e4", "address": "fa:16:3e:ec:ab:07", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c2fe55b-b5", "ovs_interfaceid": "7c2fe55b-b50d-414d-bc2e-984a899ad2e4", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.973031] env[69927]: DEBUG nova.compute.manager [req-b4bdfa45-3e0c-4fca-8e60-0f9bb33432ac req-eddf6a03-4768-433c-8bbf-baf58b8ce60e service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Received event network-vif-plugged-0b58ad52-c0bc-42d5-8d95-358aed165658 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.973336] env[69927]: DEBUG oslo_concurrency.lockutils [req-b4bdfa45-3e0c-4fca-8e60-0f9bb33432ac req-eddf6a03-4768-433c-8bbf-baf58b8ce60e service nova] Acquiring lock "358ecaef-37f0-42be-acce-00f389650c97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.973336] env[69927]: DEBUG oslo_concurrency.lockutils [req-b4bdfa45-3e0c-4fca-8e60-0f9bb33432ac req-eddf6a03-4768-433c-8bbf-baf58b8ce60e service nova] Lock "358ecaef-37f0-42be-acce-00f389650c97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.973491] env[69927]: DEBUG oslo_concurrency.lockutils [req-b4bdfa45-3e0c-4fca-8e60-0f9bb33432ac req-eddf6a03-4768-433c-8bbf-baf58b8ce60e service nova] Lock "358ecaef-37f0-42be-acce-00f389650c97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.974280] env[69927]: DEBUG nova.compute.manager [req-b4bdfa45-3e0c-4fca-8e60-0f9bb33432ac req-eddf6a03-4768-433c-8bbf-baf58b8ce60e service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] No waiting events found dispatching network-vif-plugged-0b58ad52-c0bc-42d5-8d95-358aed165658 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 935.974280] env[69927]: WARNING nova.compute.manager [req-b4bdfa45-3e0c-4fca-8e60-0f9bb33432ac req-eddf6a03-4768-433c-8bbf-baf58b8ce60e service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Received unexpected event network-vif-plugged-0b58ad52-c0bc-42d5-8d95-358aed165658 for instance with vm_state building and task_state spawning. 
[ 936.016653] env[69927]: DEBUG nova.scheduler.client.report [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.034457] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.039585] env[69927]: DEBUG nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 936.236714] env[69927]: DEBUG nova.network.neutron [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Updating instance_info_cache with network_info: [{"id": "0b58ad52-c0bc-42d5-8d95-358aed165658", "address": "fa:16:3e:c0:2e:d7", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b58ad52-c0", "ovs_interfaceid": "0b58ad52-c0bc-42d5-8d95-358aed165658", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.295319] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.296199] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 
tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.296364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.297268] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5479275-bbeb-4d89-a1ce-82e34c3a708d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.318832] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 936.319103] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.319226] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 936.319414] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.319585] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 936.319692] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 936.319893] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 
tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 936.320064] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 936.320238] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 936.320400] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 936.320572] env[69927]: DEBUG nova.virt.hardware [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 936.327093] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Reconfiguring VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 936.327371] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8271af04-e346-41e9-bd81-d8ff33c235ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.346300] env[69927]: DEBUG oslo_vmware.api [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 936.346300] env[69927]: value = "task-4096117" [ 936.346300] env[69927]: _type = "Task" [ 936.346300] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.355411] env[69927]: DEBUG oslo_vmware.api [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096117, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.522564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.526166] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.636s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.528239] env[69927]: INFO nova.compute.claims [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.552988] env[69927]: INFO nova.scheduler.client.report [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Deleted allocations for instance 5f67d6a0-e4b7-435e-8991-0f54e0379d22 [ 936.562259] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.740343] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "refresh_cache-358ecaef-37f0-42be-acce-00f389650c97" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.740691] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Instance network_info: |[{"id": "0b58ad52-c0bc-42d5-8d95-358aed165658", "address": "fa:16:3e:c0:2e:d7", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 
413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b58ad52-c0", "ovs_interfaceid": "0b58ad52-c0bc-42d5-8d95-358aed165658", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 936.741189] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:2e:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a1439ce-fa5c-420d-bcf0-083f4cc002cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b58ad52-c0bc-42d5-8d95-358aed165658', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.749699] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.749955] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.750252] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6f963eb-c6fb-4033-b258-c3839720836e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.774623] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.774623] env[69927]: value = "task-4096118" [ 936.774623] env[69927]: _type = "Task" [ 936.774623] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.783827] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096118, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.858773] env[69927]: DEBUG oslo_vmware.api [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096117, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.065962] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a523c018-edfc-4364-abc5-79911f55c111 tempest-ServersTestBootFromVolume-1196685189 tempest-ServersTestBootFromVolume-1196685189-project-member] Lock "5f67d6a0-e4b7-435e-8991-0f54e0379d22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.284s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.271328] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "9363c664-5848-408b-9b03-2dea4ceded90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.271328] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "9363c664-5848-408b-9b03-2dea4ceded90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.271328] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "9363c664-5848-408b-9b03-2dea4ceded90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.271328] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "9363c664-5848-408b-9b03-2dea4ceded90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.271591] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "9363c664-5848-408b-9b03-2dea4ceded90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.274111] env[69927]: INFO nova.compute.manager [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Terminating instance [ 937.286548] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096118, 'name': CreateVM_Task, 'duration_secs': 0.375903} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.286779] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 937.287696] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.287980] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.288405] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 937.289541] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38717e38-3efd-47e4-8ee9-c8aeeb716e11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.297249] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 937.297249] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5220ab63-a370-0281-be0b-27f866cdcb5e" [ 937.297249] env[69927]: _type = "Task" [ 937.297249] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.306909] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5220ab63-a370-0281-be0b-27f866cdcb5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.357277] env[69927]: DEBUG oslo_vmware.api [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096117, 'name': ReconfigVM_Task, 'duration_secs': 0.891829} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.357812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.358055] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Reconfigured VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 937.559905] env[69927]: DEBUG nova.compute.manager [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received event network-changed-7c2fe55b-b50d-414d-bc2e-984a899ad2e4 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.560214] env[69927]: DEBUG nova.compute.manager [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Refreshing instance network info cache due to event network-changed-7c2fe55b-b50d-414d-bc2e-984a899ad2e4. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 937.560628] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] Acquiring lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.560628] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] Acquired lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.560806] env[69927]: DEBUG nova.network.neutron [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Refreshing network info cache for port 7c2fe55b-b50d-414d-bc2e-984a899ad2e4 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 937.781997] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "refresh_cache-9363c664-5848-408b-9b03-2dea4ceded90" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.782946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquired lock "refresh_cache-9363c664-5848-408b-9b03-2dea4ceded90" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.783191] env[69927]: DEBUG nova.network.neutron [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 
tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.813437] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5220ab63-a370-0281-be0b-27f866cdcb5e, 'name': SearchDatastore_Task, 'duration_secs': 0.027499} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.813806] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.814095] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.814360] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.814452] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.814634] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.814916] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ced1214-3681-4b34-b50d-3379f8cbdae3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.830278] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.830483] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 937.833891] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2939956b-03f4-4d47-b77e-d1331d43abe0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.840667] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 937.840667] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521b5157-205b-be91-8556-518828c870ad" [ 937.840667] env[69927]: _type = "Task" [ 937.840667] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.849019] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521b5157-205b-be91-8556-518828c870ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.865190] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15326ea8-0ee2-4179-b793-4e41f6e9ad55 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-ff227e07-8e36-48d6-a8c7-1e0087fd1faa-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.766s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.019177] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7426d1a7-304f-4227-a1c7-ce806b4ced89 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.028084] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a316450-fa2b-4121-9d87-be4465e2a179 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.060328] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04096316-d473-4817-9f85-216626f03209 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.070613] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce95062-ce43-4a32-b50a-5a3d6c20b2e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.087790] env[69927]: DEBUG nova.compute.provider_tree [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.311419] env[69927]: DEBUG nova.network.neutron [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 
tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.351729] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521b5157-205b-be91-8556-518828c870ad, 'name': SearchDatastore_Task, 'duration_secs': 0.02935} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.353014] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b02d347-dc27-4f97-bf77-84f7c13899cb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.359461] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 938.359461] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248d907-7920-0a25-4dd2-e90e8c908932" [ 938.359461] env[69927]: _type = "Task" [ 938.359461] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.370904] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248d907-7920-0a25-4dd2-e90e8c908932, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.393267] env[69927]: DEBUG nova.network.neutron [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.403698] env[69927]: DEBUG nova.network.neutron [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updated VIF entry in instance network info cache for port 7c2fe55b-b50d-414d-bc2e-984a899ad2e4. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 938.403698] env[69927]: DEBUG nova.network.neutron [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7c2fe55b-b50d-414d-bc2e-984a899ad2e4", "address": "fa:16:3e:ec:ab:07", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c2fe55b-b5", "ovs_interfaceid": "7c2fe55b-b50d-414d-bc2e-984a899ad2e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.445715] env[69927]: DEBUG nova.compute.manager [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Received event network-changed-0b58ad52-c0bc-42d5-8d95-358aed165658 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.445963] env[69927]: DEBUG nova.compute.manager [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Refreshing instance network info cache due to event network-changed-0b58ad52-c0bc-42d5-8d95-358aed165658. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 938.446185] env[69927]: DEBUG oslo_concurrency.lockutils [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] Acquiring lock "refresh_cache-358ecaef-37f0-42be-acce-00f389650c97" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.446393] env[69927]: DEBUG oslo_concurrency.lockutils [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] Acquired lock "refresh_cache-358ecaef-37f0-42be-acce-00f389650c97" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.446459] env[69927]: DEBUG nova.network.neutron [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Refreshing network info cache for port 0b58ad52-c0bc-42d5-8d95-358aed165658 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.591168] env[69927]: DEBUG nova.scheduler.client.report [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.872868] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248d907-7920-0a25-4dd2-e90e8c908932, 'name': SearchDatastore_Task, 'duration_secs': 0.030008} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.872868] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.872868] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 358ecaef-37f0-42be-acce-00f389650c97/358ecaef-37f0-42be-acce-00f389650c97.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 938.873143] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-449e79e3-5e21-47dc-acdf-79296b3c9c92 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.883463] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 938.883463] env[69927]: value = "task-4096119" [ 938.883463] env[69927]: _type = "Task" [ 938.883463] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.898050] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Releasing lock "refresh_cache-9363c664-5848-408b-9b03-2dea4ceded90" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.898050] env[69927]: DEBUG nova.compute.manager [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 938.898050] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 938.898050] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096119, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.899681] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3f2b76-20c9-434f-8d6f-1c2aaa8997b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.904528] env[69927]: DEBUG oslo_concurrency.lockutils [req-b7564513-1a0a-4f44-aa7b-a4db2a71ebb7 req-a6fd06d7-caa0-4101-aecb-0184abf18ef5 service nova] Releasing lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.907873] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.908177] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb6fec2b-e33d-4014-a872-3dde6f09bf82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.914825] env[69927]: DEBUG oslo_vmware.api [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 938.914825] env[69927]: value = "task-4096120" [ 938.914825] env[69927]: _type = "Task" [ 938.914825] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.924646] env[69927]: DEBUG oslo_vmware.api [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.099385] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.100023] env[69927]: DEBUG nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 939.107534] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.737s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.109685] env[69927]: INFO nova.compute.claims [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.402297] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096119, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.416127] env[69927]: DEBUG nova.network.neutron [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Updated VIF entry in instance network info cache for port 0b58ad52-c0bc-42d5-8d95-358aed165658. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.416565] env[69927]: DEBUG nova.network.neutron [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Updating instance_info_cache with network_info: [{"id": "0b58ad52-c0bc-42d5-8d95-358aed165658", "address": "fa:16:3e:c0:2e:d7", "network": {"id": "77b7df28-cd49-4d70-bd52-38aa177e9bb4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-543587161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5551a241903f4911b27b7f4ab1c2f29d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a1439ce-fa5c-420d-bcf0-083f4cc002cd", "external-id": "nsx-vlan-transportzone-413", "segmentation_id": 413, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b58ad52-c0", "ovs_interfaceid": "0b58ad52-c0bc-42d5-8d95-358aed165658", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.428052] env[69927]: DEBUG oslo_vmware.api [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096120, 'name': PowerOffVM_Task, 'duration_secs': 0.299808} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.428356] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.428527] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.428794] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ced64018-c60f-4e38-8377-0c2be6ca4c15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.458708] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 939.459042] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 939.459240] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Deleting the datastore file [datastore1] 9363c664-5848-408b-9b03-2dea4ceded90 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.459507] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3360a9a-396b-48f6-90d0-28311019e067 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.468270] env[69927]: DEBUG oslo_vmware.api [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for the task: (returnval){ [ 939.468270] env[69927]: value = "task-4096122" [ 939.468270] env[69927]: _type = "Task" [ 939.468270] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.478077] env[69927]: DEBUG oslo_vmware.api [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096122, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.622842] env[69927]: DEBUG nova.compute.utils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 939.625330] env[69927]: DEBUG nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 939.625456] env[69927]: DEBUG nova.network.neutron [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 939.690397] env[69927]: DEBUG nova.policy [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd98497c8260f4692b8d5410447575350', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babf071cfc564f7d83c28d449c774840', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 939.904124] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096119, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547268} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.904423] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 358ecaef-37f0-42be-acce-00f389650c97/358ecaef-37f0-42be-acce-00f389650c97.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.904644] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.904908] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ae6374b-0d78-47e7-8a0e-ee428dfabce0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.920382] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 939.920382] env[69927]: value = "task-4096123" [ 939.920382] env[69927]: _type = "Task" [ 939.920382] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.927190] env[69927]: DEBUG oslo_concurrency.lockutils [req-a619196a-402b-4441-bc17-0726b354cac0 req-73549e15-fb56-4295-81b3-5b6c9d5a521a service nova] Releasing lock "refresh_cache-358ecaef-37f0-42be-acce-00f389650c97" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.933530] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096123, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.978643] env[69927]: DEBUG oslo_vmware.api [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Task: {'id': task-4096122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09339} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.978985] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.979116] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.979298] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.979472] env[69927]: INFO nova.compute.manager [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Took 1.08 seconds to destroy the instance on the hypervisor. [ 939.979892] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.979892] env[69927]: DEBUG nova.compute.manager [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 939.979986] env[69927]: DEBUG nova.network.neutron [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 940.008394] env[69927]: DEBUG nova.network.neutron [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.114318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-ff227e07-8e36-48d6-a8c7-1e0087fd1faa-7c2fe55b-b50d-414d-bc2e-984a899ad2e4" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.114318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-ff227e07-8e36-48d6-a8c7-1e0087fd1faa-7c2fe55b-b50d-414d-bc2e-984a899ad2e4" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.133020] env[69927]: DEBUG nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 940.283402] env[69927]: DEBUG nova.network.neutron [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Successfully created port: 0d5e4812-8ef2-4a9f-92e9-29113e1eb77e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.433192] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096123, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.310991} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.434027] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.434297] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5c1f0e-8030-4cd6-aaa8-80f62fbc5454 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.466485] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 358ecaef-37f0-42be-acce-00f389650c97/358ecaef-37f0-42be-acce-00f389650c97.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.469404] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd27c4a8-de2c-4eed-b227-cf53476e0fb8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.491428] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 940.491428] env[69927]: value = "task-4096124" [ 940.491428] env[69927]: _type = "Task" [ 940.491428] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.507874] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096124, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.510471] env[69927]: DEBUG nova.network.neutron [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.619307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.619307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.619307] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50648d51-a9cf-452e-9211-ea0df86be045 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.646751] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ea1316-07ee-4e80-b97f-f71948384e35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.674442] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Reconfiguring VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 940.677623] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ad43703-df84-4f01-a7f6-89adbd9b7352 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.700451] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 940.700451] env[69927]: value = "task-4096125" [ 940.700451] env[69927]: _type = "Task" [ 940.700451] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.708569] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.778348] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13294bf1-fbcf-4be5-acf4-3c55393d93c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.787465] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c570922b-a5a9-4e89-9dcd-977cbdaab2de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.822447] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53ddc70-34c8-4b5c-833d-9ea703992de9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.831568] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203cd152-409c-48c9-850d-09af2aa442f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.847585] env[69927]: DEBUG nova.compute.provider_tree [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 941.002327] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096124, 'name': ReconfigVM_Task, 'duration_secs': 0.293433} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.003704] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 358ecaef-37f0-42be-acce-00f389650c97/358ecaef-37f0-42be-acce-00f389650c97.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.003704] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14cb8e7c-e84f-41a3-99a1-40a209b96e40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.011689] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 941.011689] env[69927]: value = "task-4096126" [ 941.011689] env[69927]: _type = "Task" [ 941.011689] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.015732] env[69927]: INFO nova.compute.manager [-] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Took 1.04 seconds to deallocate network for instance. [ 941.026971] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096126, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.151658] env[69927]: DEBUG nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 941.189241] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 941.189782] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.189882] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 941.190240] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.190463] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 941.190727] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 
tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 941.191069] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 941.191442] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 941.191656] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 941.192338] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 941.192338] env[69927]: DEBUG nova.virt.hardware [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 941.195733] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89ed304-b6ce-4079-927d-7b6468a77479 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.206599] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d9305a-9414-4509-8fbb-249cf7a53240 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.215790] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.372885] env[69927]: ERROR nova.scheduler.client.report [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [req-b6f6e190-7d0b-4ccc-96c7-8a967a6c3289] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b6f6e190-7d0b-4ccc-96c7-8a967a6c3289"}]} [ 941.390456] env[69927]: DEBUG nova.scheduler.client.report [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 941.407218] env[69927]: DEBUG nova.scheduler.client.report [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 941.407461] env[69927]: DEBUG nova.compute.provider_tree [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 941.422016] env[69927]: DEBUG nova.scheduler.client.report [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 941.442873] env[69927]: DEBUG nova.scheduler.client.report [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Refreshing trait associations for resource provider 
2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 941.522878] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.523260] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096126, 'name': Rename_Task, 'duration_secs': 0.140701} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.526503] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 941.527637] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfa82a1c-976b-43ca-9702-d861dd867e0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.536227] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 941.536227] env[69927]: value = "task-4096127" [ 941.536227] env[69927]: _type = "Task" [ 941.536227] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.549078] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096127, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.713828] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.981272] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef166b32-74ae-411e-8d1b-5f7aae3b1672 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.993297] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28204ed0-545d-4a8b-a416-c2fc5f693c78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.037456] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0e6482-1bed-40dd-95a5-9f277fde8351 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.051496] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f09fd9-759f-4b40-9dcc-ac66ce0a3f9d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.056107] env[69927]: DEBUG oslo_vmware.api [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096127, 'name': PowerOnVM_Task, 'duration_secs': 0.493718} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.056107] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 942.056107] env[69927]: INFO nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Took 7.80 seconds to spawn the instance on the hypervisor. 
[ 942.056107] env[69927]: DEBUG nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 942.057318] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97060bf-7d76-4ca5-8b52-6f9f888cdd37 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.067983] env[69927]: DEBUG nova.compute.provider_tree [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 942.219918] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.231992] env[69927]: DEBUG nova.compute.manager [req-cde6d45d-d37b-4679-b682-7c8f0f2098ef req-4481f570-ad04-46e0-a882-81ce259030b7 service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Received event network-vif-plugged-0d5e4812-8ef2-4a9f-92e9-29113e1eb77e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.232255] env[69927]: DEBUG oslo_concurrency.lockutils [req-cde6d45d-d37b-4679-b682-7c8f0f2098ef req-4481f570-ad04-46e0-a882-81ce259030b7 service nova] Acquiring lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.232471] env[69927]: DEBUG oslo_concurrency.lockutils [req-cde6d45d-d37b-4679-b682-7c8f0f2098ef req-4481f570-ad04-46e0-a882-81ce259030b7 service nova] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.232801] env[69927]: DEBUG oslo_concurrency.lockutils [req-cde6d45d-d37b-4679-b682-7c8f0f2098ef req-4481f570-ad04-46e0-a882-81ce259030b7 service nova] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.232801] env[69927]: DEBUG nova.compute.manager [req-cde6d45d-d37b-4679-b682-7c8f0f2098ef req-4481f570-ad04-46e0-a882-81ce259030b7 service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] No waiting events found dispatching 
network-vif-plugged-0d5e4812-8ef2-4a9f-92e9-29113e1eb77e {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 942.232997] env[69927]: WARNING nova.compute.manager [req-cde6d45d-d37b-4679-b682-7c8f0f2098ef req-4481f570-ad04-46e0-a882-81ce259030b7 service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Received unexpected event network-vif-plugged-0d5e4812-8ef2-4a9f-92e9-29113e1eb77e for instance with vm_state building and task_state spawning. [ 942.378745] env[69927]: DEBUG nova.network.neutron [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Successfully updated port: 0d5e4812-8ef2-4a9f-92e9-29113e1eb77e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 942.590812] env[69927]: INFO nova.compute.manager [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Took 50.23 seconds to build instance. [ 942.616020] env[69927]: DEBUG nova.scheduler.client.report [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 942.616020] env[69927]: DEBUG nova.compute.provider_tree [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 94 to 95 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 942.616020] env[69927]: DEBUG nova.compute.provider_tree [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 942.715521] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.885151] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "refresh_cache-30d9d1ac-4be0-4723-86b5-0aceda88e67b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.885537] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "refresh_cache-30d9d1ac-4be0-4723-86b5-0aceda88e67b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.886318] env[69927]: DEBUG nova.network.neutron [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.092148] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f52a5e84-2743-4da2-b9cf-62e5009c2daf tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "358ecaef-37f0-42be-acce-00f389650c97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.600s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.120134] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.013s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.120717] env[69927]: DEBUG nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 943.128021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.989s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.128021] env[69927]: INFO nova.compute.claims [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.216178] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.449219] env[69927]: DEBUG nova.network.neutron [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.598250] env[69927]: DEBUG nova.compute.manager [None req-72639dea-3299-475d-b48a-8e1783bc1cf5 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2ab21cdb-165b-4b71-a865-1a72cfb430c6] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 943.633322] env[69927]: DEBUG nova.compute.utils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 943.642924] env[69927]: DEBUG nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 943.642924] env[69927]: DEBUG nova.network.neutron [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 943.704782] env[69927]: DEBUG nova.policy [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20629f26389d40199a4c5d5d2312dbae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2fb1fc4c3ae41a5b331c6be7973eb72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 943.718584] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.915612] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.916092] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.916327] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.916571] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.916695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.919626] env[69927]: INFO nova.compute.manager [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Terminating instance [ 944.000649] env[69927]: DEBUG nova.network.neutron [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Updating instance_info_cache with network_info: [{"id": "0d5e4812-8ef2-4a9f-92e9-29113e1eb77e", "address": "fa:16:3e:97:bf:d7", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d5e4812-8e", "ovs_interfaceid": "0d5e4812-8ef2-4a9f-92e9-29113e1eb77e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.009099] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "358ecaef-37f0-42be-acce-00f389650c97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.009362] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "358ecaef-37f0-42be-acce-00f389650c97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.009561] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "358ecaef-37f0-42be-acce-00f389650c97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.009735] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "358ecaef-37f0-42be-acce-00f389650c97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.010060] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "358ecaef-37f0-42be-acce-00f389650c97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.012154] env[69927]: INFO nova.compute.manager [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Terminating instance [ 944.101290] env[69927]: DEBUG nova.compute.manager [None req-72639dea-3299-475d-b48a-8e1783bc1cf5 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 
2ab21cdb-165b-4b71-a865-1a72cfb430c6] Instance disappeared before build. {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 944.141223] env[69927]: DEBUG nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 944.142840] env[69927]: DEBUG nova.network.neutron [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Successfully created port: 0780f226-29c3-4879-8d9c-5dfd33960929 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.221178] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.427287] env[69927]: DEBUG nova.compute.manager [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 944.427516] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.428930] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d6bbe5-769a-40f5-b615-0dec38ceabcc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.441250] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.441611] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-925cf12f-327c-45c2-8ae5-b4ba3c5dce54 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.449578] env[69927]: DEBUG oslo_vmware.api [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 944.449578] env[69927]: value = "task-4096128" [ 944.449578] env[69927]: _type = "Task" [ 944.449578] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.459233] env[69927]: DEBUG oslo_vmware.api [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096128, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.505032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "refresh_cache-30d9d1ac-4be0-4723-86b5-0aceda88e67b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.505381] env[69927]: DEBUG nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Instance network_info: |[{"id": "0d5e4812-8ef2-4a9f-92e9-29113e1eb77e", "address": "fa:16:3e:97:bf:d7", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d5e4812-8e", "ovs_interfaceid": "0d5e4812-8ef2-4a9f-92e9-29113e1eb77e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 944.505811] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:bf:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d5e4812-8ef2-4a9f-92e9-29113e1eb77e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.515726] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 944.515881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.516306] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f43fa4b-cf33-4cab-9de2-2a0f293642ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.536430] env[69927]: DEBUG nova.compute.manager [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 944.536775] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.538319] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481c1e3a-ac9a-4261-bf2e-10801fe4627e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.547644] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.549827] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3c3ebbe-1d56-4b85-a26c-5e914d29d88f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.552011] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.552011] env[69927]: value = "task-4096129" [ 944.552011] env[69927]: _type = "Task" [ 944.552011] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.562051] env[69927]: DEBUG nova.compute.manager [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Received event network-changed-0d5e4812-8ef2-4a9f-92e9-29113e1eb77e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 944.562051] env[69927]: DEBUG nova.compute.manager [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Refreshing instance network info cache due to event network-changed-0d5e4812-8ef2-4a9f-92e9-29113e1eb77e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 944.562051] env[69927]: DEBUG oslo_concurrency.lockutils [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] Acquiring lock "refresh_cache-30d9d1ac-4be0-4723-86b5-0aceda88e67b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.563306] env[69927]: DEBUG oslo_concurrency.lockutils [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] Acquired lock "refresh_cache-30d9d1ac-4be0-4723-86b5-0aceda88e67b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.563306] env[69927]: DEBUG nova.network.neutron [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Refreshing network info cache for port 0d5e4812-8ef2-4a9f-92e9-29113e1eb77e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.566386] env[69927]: DEBUG oslo_vmware.api [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 944.566386] env[69927]: value = "task-4096130" [ 944.566386] env[69927]: _type = "Task" [ 944.566386] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.575532] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096129, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.584344] env[69927]: DEBUG oslo_vmware.api [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096130, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.619710] env[69927]: DEBUG oslo_concurrency.lockutils [None req-72639dea-3299-475d-b48a-8e1783bc1cf5 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2ab21cdb-165b-4b71-a865-1a72cfb430c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.540s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.723168] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.827754] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361613c2-fcf9-47a0-aec4-ffbe89f85207 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.839155] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e6a596-6488-4f69-bb16-05d5f9d30f7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.878544] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed12afb6-d51c-466b-afc2-0a9a26b52662 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.886887] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c9371a-4936-4052-b8a4-9914fe6364fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.901342] env[69927]: DEBUG nova.compute.provider_tree [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 944.959933] env[69927]: DEBUG oslo_vmware.api [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096128, 'name': PowerOffVM_Task, 'duration_secs': 0.239572} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.960231] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.960402] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.960654] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-535157f0-c652-406a-92cb-3bbbd152200c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.027111] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.027261] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.027447] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleting the datastore file [datastore1] 8be7e64c-7bc6-41a0-ada5-0a5057a2af45 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.027714] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1843db95-7a76-45db-97b0-ffe2fd8dbd7c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.036496] env[69927]: DEBUG oslo_vmware.api [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 945.036496] env[69927]: value = "task-4096132" [ 945.036496] env[69927]: _type = "Task" [ 945.036496] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.046772] env[69927]: DEBUG oslo_vmware.api [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.065239] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096129, 'name': CreateVM_Task, 'duration_secs': 0.401716} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.065239] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 945.067299] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.067611] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.068059] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 945.072407] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62616065-fbe5-419d-ba86-cb429df93233 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.083699] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 945.083699] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52366973-984e-2e8b-2257-d354bd949b36" [ 945.083699] env[69927]: _type = "Task" [ 945.083699] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.087300] env[69927]: DEBUG oslo_vmware.api [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096130, 'name': PowerOffVM_Task, 'duration_secs': 0.232524} completed successfully. 
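Annotation: the acquire/release pairs around the devstack-image-cache_base path show the image-cache work being serialized under a named lock before the datastore search runs. A sketch of that pattern with oslo.concurrency follows; the external=True flag and the empty body are assumptions for illustration, not Nova's exact call.

from oslo_concurrency import lockutils

# Serialize work on the cached image path, as the lock lines above do.
cache_path = "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24"

with lockutils.lock(cache_path, external=True):
    # ... invoke HostDatastoreBrowser.SearchDatastore_Task and wait for it ...
    pass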
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.093357] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.093357] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.093357] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9f9d234-c121-4e65-b992-054082a70421 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.101145] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52366973-984e-2e8b-2257-d354bd949b36, 'name': SearchDatastore_Task, 'duration_secs': 0.010643} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.101577] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.101943] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.102468] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.103258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.103857] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.104272] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0952ddb6-7d51-4643-810b-5e569513d80d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.116754] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.116754] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.116754] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8389edc-ebde-4ff0-aa00-298623d60dcd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.125018] env[69927]: DEBUG nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 945.127254] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 945.127254] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52175f97-104b-53d0-144c-daf26061a9ee" [ 945.127254] env[69927]: _type = "Task" [ 945.127254] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.138735] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52175f97-104b-53d0-144c-daf26061a9ee, 'name': SearchDatastore_Task, 'duration_secs': 0.011218} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.145021] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ef00be9-9888-42ad-bdac-0bbc3ccab3c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.152022] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 945.152022] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d6eaa4-0611-288f-d165-5f2ee4b6e54c" [ 945.152022] env[69927]: _type = "Task" [ 945.152022] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.158297] env[69927]: DEBUG nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 945.166356] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d6eaa4-0611-288f-d165-5f2ee4b6e54c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.168421] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.168948] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.169347] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleting the datastore file [datastore1] 358ecaef-37f0-42be-acce-00f389650c97 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.170787] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a838e166-eeaa-4084-ab55-91cc14d1fa47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.180692] env[69927]: DEBUG oslo_vmware.api [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for the task: (returnval){ [ 945.180692] env[69927]: value = "task-4096134" [ 945.180692] env[69927]: _type = "Task" [ 945.180692] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.198919] env[69927]: DEBUG oslo_vmware.api [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096134, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.202183] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.202552] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.202688] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.203042] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.203137] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 945.203242] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.203488] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.203644] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 945.204082] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Got 1 
possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.204343] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.204534] env[69927]: DEBUG nova.virt.hardware [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.205814] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cf7a5e-36d2-411f-a8e6-afc8063f8ef9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.219889] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ffa098-ebf8-4fcc-a04b-afdf26bad1b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.228192] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.437370] env[69927]: DEBUG nova.scheduler.client.report [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 945.437625] env[69927]: DEBUG nova.compute.provider_tree [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 95 to 96 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 945.437801] env[69927]: DEBUG nova.compute.provider_tree [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 945.473589] env[69927]: DEBUG nova.network.neutron [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Updated VIF entry in instance network info cache for port 0d5e4812-8ef2-4a9f-92e9-29113e1eb77e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.473953] env[69927]: DEBUG nova.network.neutron [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Updating instance_info_cache with network_info: [{"id": "0d5e4812-8ef2-4a9f-92e9-29113e1eb77e", "address": "fa:16:3e:97:bf:d7", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d5e4812-8e", "ovs_interfaceid": "0d5e4812-8ef2-4a9f-92e9-29113e1eb77e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.547713] env[69927]: DEBUG oslo_vmware.api [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166193} completed successfully. 
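Annotation: the nova.virt.hardware lines a little further up walk from flavor/image limits of 0:0:0 (unset, so the 65536 defaults apply) to a single possible topology for the 1-vCPU m1.nano flavor. The snippet below is a brute-force reconstruction of that selection for illustration; it is simplified from what nova.virt.hardware actually does.

import itertools

vcpus = 1
max_sockets = max_cores = max_threads = 65536  # "limits were sockets=65536, cores=65536, threads=65536"

# Enumerate (sockets, cores, threads) whose product equals the vCPU count and
# which respect the limits; for vcpus=1 this yields only (1, 1, 1).
candidates = [
    (s, c, t)
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3)
    if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads
]
print(candidates)  # [(1, 1, 1)] -- matches "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"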
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.547713] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.547713] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.547713] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.547975] env[69927]: INFO nova.compute.manager [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Took 1.12 seconds to destroy the instance on the hypervisor. [ 945.548257] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 945.548472] env[69927]: DEBUG nova.compute.manager [-] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 945.548543] env[69927]: DEBUG nova.network.neutron [-] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.657361] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.661582] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d6eaa4-0611-288f-d165-5f2ee4b6e54c, 'name': SearchDatastore_Task, 'duration_secs': 0.012657} completed successfully. 
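Annotation: between tasks 4096128 and 4096132 the log records the full teardown of instance 8be7e64c-7bc6-41a0-ada5-0a5057a2af45: power off, unregister, delete the datastore directory, then deallocate networking. The outline below captures that order only; the helpers are stubs, not Nova's API.

def power_off(vm):            print("PowerOffVM_Task", vm)
def unregister(vm):           print("VirtualMachine.UnregisterVM", vm)
def delete_datastore_dir(vm): print("FileManager.DeleteDatastoreFile_Task", vm)
def deallocate_network(vm):   print("neutron deallocate_for_instance()", vm)

def destroy_instance(vm):
    # Order taken from the log: stop the guest, drop it from vCenter's
    # inventory, remove its files, then release its ports.
    power_off(vm)
    unregister(vm)
    delete_datastore_dir(vm)
    deallocate_network(vm)

destroy_instance("8be7e64c-7bc6-41a0-ada5-0a5057a2af45")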
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.662086] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.662377] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 30d9d1ac-4be0-4723-86b5-0aceda88e67b/30d9d1ac-4be0-4723-86b5-0aceda88e67b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 945.662650] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e2136e4-fea9-45f7-8c3b-70c4bf034d6c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.671153] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 945.671153] env[69927]: value = "task-4096135" [ 945.671153] env[69927]: _type = "Task" [ 945.671153] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.680019] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.690009] env[69927]: DEBUG oslo_vmware.api [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Task: {'id': task-4096134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15224} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.690301] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.690488] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.690666] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.690842] env[69927]: INFO nova.compute.manager [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Took 1.15 seconds to destroy the instance on the hypervisor. [ 945.691112] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 945.691315] env[69927]: DEBUG nova.compute.manager [-] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 945.691409] env[69927]: DEBUG nova.network.neutron [-] [instance: 358ecaef-37f0-42be-acce-00f389650c97] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.722591] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.807409] env[69927]: DEBUG nova.network.neutron [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Successfully updated port: 0780f226-29c3-4879-8d9c-5dfd33960929 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.910426] env[69927]: DEBUG nova.compute.manager [req-da5e49d2-ec18-4b32-83c3-17f652f5704d req-56dec0fc-1c6b-4cd1-bc2f-c4d1f60ade18 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Received event network-vif-plugged-0780f226-29c3-4879-8d9c-5dfd33960929 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 945.910426] env[69927]: DEBUG oslo_concurrency.lockutils [req-da5e49d2-ec18-4b32-83c3-17f652f5704d req-56dec0fc-1c6b-4cd1-bc2f-c4d1f60ade18 service nova] Acquiring lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.910426] env[69927]: DEBUG oslo_concurrency.lockutils [req-da5e49d2-ec18-4b32-83c3-17f652f5704d req-56dec0fc-1c6b-4cd1-bc2f-c4d1f60ade18 service nova] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.910426] env[69927]: DEBUG oslo_concurrency.lockutils [req-da5e49d2-ec18-4b32-83c3-17f652f5704d req-56dec0fc-1c6b-4cd1-bc2f-c4d1f60ade18 service nova] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.910426] env[69927]: DEBUG nova.compute.manager [req-da5e49d2-ec18-4b32-83c3-17f652f5704d req-56dec0fc-1c6b-4cd1-bc2f-c4d1f60ade18 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] No waiting events found dispatching network-vif-plugged-0780f226-29c3-4879-8d9c-5dfd33960929 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 945.910426] env[69927]: WARNING nova.compute.manager [req-da5e49d2-ec18-4b32-83c3-17f652f5704d req-56dec0fc-1c6b-4cd1-bc2f-c4d1f60ade18 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Received unexpected event network-vif-plugged-0780f226-29c3-4879-8d9c-5dfd33960929 for instance with vm_state building and task_state spawning. [ 945.944441] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.819s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.944827] env[69927]: DEBUG nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Start building networks asynchronously for instance. 
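Annotation: the req-da5e49d2 lines show the external-event path: Neutron reports network-vif-plugged, the compute manager tries to pop a matching waiter under the per-instance "<uuid>-events" lock, finds none, and logs the "unexpected event" warning. The toy model below illustrates that pop; the registry and names are assumptions, not Nova's implementation.

# Waiters registered by code that asked to wait for network-vif-plugged before
# the event arrived; empty here, so the pop returns None.
waiting_events = {}  # {(instance_uuid, event_name): callback}

def pop_instance_event(instance_uuid, event_name):
    return waiting_events.pop((instance_uuid, event_name), None)

cb = pop_instance_event("44e81156-b0c7-4f68-9732-b39f41ebcd4b",
                        "network-vif-plugged-0780f226-29c3-4879-8d9c-5dfd33960929")
if cb is None:
    # Mirrors the WARNING above: nothing was waiting, so the event is unexpected.
    print("Received unexpected event for instance with vm_state building")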
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.950826] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.302s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.951079] env[69927]: DEBUG nova.objects.instance [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lazy-loading 'resources' on Instance uuid 480a672c-cb48-45e3-86bd-1741957a5124 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.976848] env[69927]: DEBUG oslo_concurrency.lockutils [req-8e1f237a-94a7-42f0-9193-5eb90ec29dc6 req-804b2088-68fa-4cc4-b117-939b6371a34d service nova] Releasing lock "refresh_cache-30d9d1ac-4be0-4723-86b5-0aceda88e67b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.186933] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096135, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.227517] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.311089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "refresh_cache-44e81156-b0c7-4f68-9732-b39f41ebcd4b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.311089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "refresh_cache-44e81156-b0c7-4f68-9732-b39f41ebcd4b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.311089] env[69927]: DEBUG nova.network.neutron [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.464301] env[69927]: DEBUG nova.compute.utils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.468022] env[69927]: DEBUG nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 946.482047] env[69927]: DEBUG nova.network.neutron [-] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.626692] env[69927]: DEBUG nova.network.neutron [-] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.686234] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.852498} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.686552] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 30d9d1ac-4be0-4723-86b5-0aceda88e67b/30d9d1ac-4be0-4723-86b5-0aceda88e67b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 946.686811] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 946.687130] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1bf782e5-5856-459f-be9a-f15a8bbba4da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.697121] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 946.697121] env[69927]: value = "task-4096136" [ 946.697121] env[69927]: _type = "Task" [ 946.697121] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.707251] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096136, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.727919] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.860724] env[69927]: DEBUG nova.network.neutron [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Instance cache missing network info. 
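Annotation: tasks 4096135-4096137 prepare the root disk for instance 30d9d1ac-4be0-4723-86b5-0aceda88e67b: copy the cached image VMDK into the instance folder, extend it to the flavor's 1 GB root disk (the logged 1048576 is the size in KB), then attach it via a VM reconfigure. The outline below is a placeholder for that sequence; the helpers are stubs, not nova.virt.vmwareapi's functions.

def copy_virtual_disk(src, dst):        print("CopyVirtualDisk_Task", src, "->", dst)
def extend_virtual_disk(path, size_kb): print("ExtendVirtualDisk_Task", path, size_kb)
def attach_disk_to_vm(path, disk_type): print("ReconfigVM_Task attach", path, "type", disk_type)

def prepare_root_disk(cache_vmdk, instance_vmdk, root_gb):
    copy_virtual_disk(cache_vmdk, instance_vmdk)
    extend_virtual_disk(instance_vmdk, root_gb * 1024 * 1024)  # 1 GB -> 1048576 KB, as logged
    attach_disk_to_vm(instance_vmdk, disk_type="sparse")

prepare_root_disk(
    "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk",
    "[datastore1] 30d9d1ac-4be0-4723-86b5-0aceda88e67b/30d9d1ac-4be0-4723-86b5-0aceda88e67b.vmdk",
    root_gb=1,
)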
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.929992] env[69927]: DEBUG nova.compute.manager [req-40cb0b88-7b85-450e-9e41-ed194591e9bb req-c73e2d15-8757-452a-bd30-1f485279eab9 service nova] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Received event network-vif-deleted-e6bbd21d-3bfb-40a1-ab40-d734248c04fb {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 946.931326] env[69927]: DEBUG nova.compute.manager [req-40cb0b88-7b85-450e-9e41-ed194591e9bb req-c73e2d15-8757-452a-bd30-1f485279eab9 service nova] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Received event network-vif-deleted-0b58ad52-c0bc-42d5-8d95-358aed165658 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 946.969406] env[69927]: DEBUG nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.986199] env[69927]: INFO nova.compute.manager [-] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Took 1.44 seconds to deallocate network for instance. [ 947.036470] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced5b1a6-768f-42c8-9cb9-7f456d0a1742 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.046455] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844edb1b-b4f2-4388-aa4e-6536b75670c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.087797] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4606651e-ca10-44f7-8d0e-239cf143c036 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.091101] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.091482] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.098779] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a03cc1-e85b-4738-8101-bcad7444ca0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.114985] env[69927]: DEBUG nova.compute.provider_tree [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 
tempest-ServersTestMultiNic-1807665099-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.125571] env[69927]: DEBUG nova.network.neutron [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Updating instance_info_cache with network_info: [{"id": "0780f226-29c3-4879-8d9c-5dfd33960929", "address": "fa:16:3e:b1:d8:31", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0780f226-29", "ovs_interfaceid": "0780f226-29c3-4879-8d9c-5dfd33960929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.130555] env[69927]: INFO nova.compute.manager [-] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Took 1.44 seconds to deallocate network for instance. [ 947.209133] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096136, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.183067} completed successfully. 
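Annotation: the instance_info_cache entry above for port 0780f226-29c3-4879-8d9c-5dfd33960929 is deeply nested; the trimmed copy below is reproduced only to make the per-VIF fields explicit (device name, MAC, fixed IP, NSX segment). All values are taken verbatim from the log.

# Trimmed from the network_info logged above for instance 44e81156-...
vif = {
    "id": "0780f226-29c3-4879-8d9c-5dfd33960929",
    "address": "fa:16:3e:b1:d8:31",
    "type": "ovs",
    "devname": "tap0780f226-29",
    "details": {"nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e",
                "segmentation_id": 203},
    "network": {
        "id": "045356e7-ce71-4c33-9121-8655a915fc1d",
        "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.8", "type": "fixed"}]}],
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
print(vif["devname"], vif["address"], fixed_ips)  # tap0780f226-29 fa:16:3e:b1:d8:31 ['192.168.128.8']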
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.209475] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.210432] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dfe9be-fce6-4b14-bd95-b0a999807ee7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.234897] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 30d9d1ac-4be0-4723-86b5-0aceda88e67b/30d9d1ac-4be0-4723-86b5-0aceda88e67b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.239226] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec85b0c1-a9a5-493b-9cb5-f3381db95116 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.261447] env[69927]: DEBUG oslo_vmware.api [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096125, 'name': ReconfigVM_Task, 'duration_secs': 6.331388} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.262765] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.263041] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Reconfigured VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 947.265351] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 947.265351] env[69927]: value = "task-4096137" [ 947.265351] env[69927]: _type = "Task" [ 947.265351] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.275048] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096137, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.492849] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.618107] env[69927]: DEBUG nova.scheduler.client.report [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.628039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "refresh_cache-44e81156-b0c7-4f68-9732-b39f41ebcd4b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.628384] env[69927]: DEBUG nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Instance network_info: |[{"id": "0780f226-29c3-4879-8d9c-5dfd33960929", "address": "fa:16:3e:b1:d8:31", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0780f226-29", "ovs_interfaceid": "0780f226-29c3-4879-8d9c-5dfd33960929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 947.629183] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:d8:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0780f226-29c3-4879-8d9c-5dfd33960929', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.638560] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.640181] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.640435] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.640997] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24ac611f-919c-4045-9fdd-0f15d01fed29 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.674717] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.674717] env[69927]: value = "task-4096138" [ 947.674717] env[69927]: _type = "Task" [ 947.674717] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.687816] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096138, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.778526] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096137, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.982336] env[69927]: DEBUG nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 948.007366] env[69927]: DEBUG nova.compute.manager [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Received event network-changed-0780f226-29c3-4879-8d9c-5dfd33960929 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 948.007553] env[69927]: DEBUG nova.compute.manager [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Refreshing instance network info cache due to event network-changed-0780f226-29c3-4879-8d9c-5dfd33960929. 
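Annotation: the "Instance VIF info" handed to build_virtual_machine just above is a much smaller structure than the Neutron cache entry: bridge name, MAC, an opaque NSX network reference, the Neutron port id, and the vif model. The copy below is reformatted from the log for readability only; no fields are added.

vif_info = [{
    "network_name": "br-int",
    "mac_address": "fa:16:3e:b1:d8:31",
    "network_ref": {
        "type": "OpaqueNetwork",
        "network-id": "40c947c4-f471-4d48-8e43-fee54198107e",
        "network-type": "nsx.LogicalSwitch",
        "use-external-id": True,
    },
    "iface_id": "0780f226-29c3-4879-8d9c-5dfd33960929",
    "vif_model": "vmxnet3",
}]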
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 948.007760] env[69927]: DEBUG oslo_concurrency.lockutils [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] Acquiring lock "refresh_cache-44e81156-b0c7-4f68-9732-b39f41ebcd4b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.007901] env[69927]: DEBUG oslo_concurrency.lockutils [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] Acquired lock "refresh_cache-44e81156-b0c7-4f68-9732-b39f41ebcd4b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.008075] env[69927]: DEBUG nova.network.neutron [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Refreshing network info cache for port 0780f226-29c3-4879-8d9c-5dfd33960929 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.013770] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 948.014541] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.014749] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 948.014949] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.015124] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 948.015276] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 948.015646] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 948.015646] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 948.015806] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 948.016025] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 948.016199] env[69927]: DEBUG nova.virt.hardware [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 948.017502] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ebb62d-7c32-4310-be27-4f00053ff48f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.028410] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf38c451-8b97-46d5-bece-491d7509f0bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.052078] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 948.058318] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Creating folder: Project (2b652d835e2146bf8153194a307f854c). Parent ref: group-v811283. 
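Editor's note on the nova.virt.hardware entries above: for the 1-vCPU m1.nano flavor with no explicit flavor or image limits, nova enumerates candidate sockets:cores:threads triples and ends up with a single 1:1:1 topology. The sketch below reproduces that enumeration idea only; it is a hypothetical helper, not nova's _get_possible_cpu_topologies, and it simply assumes the product-equals-vcpus rule within the logged 65536 limits.

    # Hypothetical helper (not nova's implementation): enumerate CPU topologies
    # whose sockets * cores * threads equals the flavor's vCPU count and which
    # stay within the logged limits (65536 per dimension when unset).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // sockets // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    # For the m1.nano flavor above (vcpus=1) this yields a single candidate,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))   # [(1, 1, 1)]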
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 948.059331] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ac21b4c-5770-4d3c-976e-35a341263172 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.073580] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Created folder: Project (2b652d835e2146bf8153194a307f854c) in parent group-v811283. [ 948.073860] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Creating folder: Instances. Parent ref: group-v811471. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 948.074423] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6fda8f8-b2e7-4784-bc18-931fe823f453 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.087918] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Created folder: Instances in parent group-v811471. [ 948.088185] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 948.088428] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 948.089600] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a6f13fe-82a8-4e5e-bbf4-0f6d2e4d5950 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.107532] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 948.107532] env[69927]: value = "task-4096141" [ 948.107532] env[69927]: _type = "Task" [ 948.107532] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.116625] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096141, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.124883] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.174s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.127574] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.389s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.127820] env[69927]: DEBUG nova.objects.instance [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'resources' on Instance uuid f6972b90-7746-4a37-8be8-1739f96dc3dc {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.152043] env[69927]: INFO nova.scheduler.client.report [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Deleted allocations for instance 480a672c-cb48-45e3-86bd-1741957a5124 [ 948.187686] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096138, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.278086] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096137, 'name': ReconfigVM_Task, 'duration_secs': 0.90194} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.278307] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 30d9d1ac-4be0-4723-86b5-0aceda88e67b/30d9d1ac-4be0-4723-86b5-0aceda88e67b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.278814] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34aebd53-ee27-42db-acc2-93787abf8f80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.286990] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 948.286990] env[69927]: value = "task-4096142" [ 948.286990] env[69927]: _type = "Task" [ 948.286990] env[69927]: } to complete. 
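Editor's note on the repeated wait_for_task / _poll_task entries above (progress 0% ... 99% ... "completed successfully"): they reflect a poll-until-done loop over a vCenter task handle. The sketch below shows only that generic pattern; get_task_info is a hypothetical callable standing in for the vSphere task-info lookup the real code performs, and none of the names here belong to oslo.vmware.

    import time

    # Generic poll-until-done loop illustrating the wait_for_task/_poll_task
    # pattern seen above. `get_task_info` is a hypothetical callable returning
    # (state, progress) for the task being watched.
    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_task_info()
            print(f"progress is {progress}%")      # mirrors the DEBUG lines above
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError("task failed")
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")

    # Usage with a fake task that finishes on the third poll:
    _states = iter([('running', 0), ('running', 99), ('success', 100)])
    wait_for_task(lambda: next(_states), poll_interval=0)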
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.300251] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096142, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.574949] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.575195] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.575385] env[69927]: DEBUG nova.network.neutron [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.621048] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096141, 'name': CreateVM_Task, 'duration_secs': 0.340466} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.621048] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.621251] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.621391] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.621706] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.621951] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e34f0e8-b6b7-4a08-b1a8-39c5024628a6 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.626736] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 948.626736] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5285ae90-c021-6284-1b36-fe415a81acc6" [ 948.626736] env[69927]: _type = "Task" [ 948.626736] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.638101] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5285ae90-c021-6284-1b36-fe415a81acc6, 'name': SearchDatastore_Task} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.638342] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.638544] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.638777] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.638919] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.639110] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.639362] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8acfb6d0-9dca-42ad-918b-1adfa08e8067 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.651228] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.651410] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.652424] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4a4015b-05d1-49a6-ab7c-c668504e6de3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.661133] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 948.661133] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521f0f96-6226-5c55-7d4a-f8ef8b639c67" [ 948.661133] env[69927]: _type = "Task" [ 948.661133] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.661562] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f8aaeaed-ed76-4327-b92c-00dc9be5973a tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "480a672c-cb48-45e3-86bd-1741957a5124" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.658s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.673570] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521f0f96-6226-5c55-7d4a-f8ef8b639c67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.682813] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096138, 'name': CreateVM_Task, 'duration_secs': 0.551307} completed successfully. 
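Editor's note on the ds_util / vm_util entries above: datastore locations are written in the "[datastore] relative/path" notation, e.g. the image cache folder and the cached VMDK named after the image ID. The formatter below is a minimal sketch of that string layout only; it is not nova's or oslo.vmware's datastore-path helper.

    # Minimal formatter for the "[datastore] relative/path" notation used in the
    # ds_util/vm_util entries above. Same layout only, not nova's helper.
    def ds_path(datastore, *parts):
        return "[%s] %s" % (datastore, "/".join(parts))

    image_id = "f524494e-9179-4b3e-a3e2-782f019def24"
    cache_vmdk = ds_path("datastore1", "devstack-image-cache_base",
                         image_id, image_id + ".vmdk")
    print(cache_vmdk)   # -> "[datastore1] devstack-image-cache_base/<image id>/<image id>.vmdk"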
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.685277] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.686133] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.686308] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.686631] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.687131] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c34e396-078d-4e1d-9f65-bc1444b6e8cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.692651] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 948.692651] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528e386c-6272-71f0-1c71-4ef8ccfc444d" [ 948.692651] env[69927]: _type = "Task" [ 948.692651] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.705869] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528e386c-6272-71f0-1c71-4ef8ccfc444d, 'name': SearchDatastore_Task, 'duration_secs': 0.010462} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.706199] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.706426] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.706628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.799876] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096142, 'name': Rename_Task, 'duration_secs': 0.173904} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.800825] env[69927]: DEBUG nova.network.neutron [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Updated VIF entry in instance network info cache for port 0780f226-29c3-4879-8d9c-5dfd33960929. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 948.801154] env[69927]: DEBUG nova.network.neutron [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Updating instance_info_cache with network_info: [{"id": "0780f226-29c3-4879-8d9c-5dfd33960929", "address": "fa:16:3e:b1:d8:31", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0780f226-29", "ovs_interfaceid": "0780f226-29c3-4879-8d9c-5dfd33960929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.802451] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.802896] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3e8e77a-b1e6-4be3-9ad3-6cf84c74837b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.813035] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 948.813035] env[69927]: value = "task-4096143" [ 948.813035] env[69927]: _type = "Task" [ 948.813035] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.824016] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096143, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.056313] env[69927]: DEBUG nova.compute.manager [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received event network-vif-deleted-7c2fe55b-b50d-414d-bc2e-984a899ad2e4 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.056556] env[69927]: INFO nova.compute.manager [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Neutron deleted interface 7c2fe55b-b50d-414d-bc2e-984a899ad2e4; detaching it from the instance and deleting it from the info cache [ 949.056823] env[69927]: DEBUG nova.network.neutron [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.061232] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.061456] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.061658] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock 
"ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.061825] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.061997] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.066185] env[69927]: INFO nova.compute.manager [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Terminating instance [ 949.164190] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eaf5129-35d5-40e1-be6b-8b12245893fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.185302] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164c9e85-53c5-4223-85e0-f988af10fb4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.189485] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521f0f96-6226-5c55-7d4a-f8ef8b639c67, 'name': SearchDatastore_Task, 'duration_secs': 0.00956} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.191268] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c49c20e2-8f3e-4681-9883-7a5ad23a5b75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.227359] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a083c94-91f7-4462-895a-c5ee7cf64307 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.231848] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 949.231848] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52668277-dc7c-77cc-07ad-1e76257ced5e" [ 949.231848] env[69927]: _type = "Task" [ 949.231848] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.239452] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d287e2-87b0-4d54-b4c1-443058184ded {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.250668] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52668277-dc7c-77cc-07ad-1e76257ced5e, 'name': SearchDatastore_Task, 'duration_secs': 0.011} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.251474] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.251771] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.252082] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.252274] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.252492] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c938d8a3-ffc2-4f2e-bce0-4c2ee4960c5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.263935] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44ba2fa6-534a-4eab-aaf5-ace75cf99041 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.266751] env[69927]: DEBUG nova.compute.provider_tree [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.278677] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 
tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 949.278677] env[69927]: value = "task-4096144" [ 949.278677] env[69927]: _type = "Task" [ 949.278677] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.285899] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.286217] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.287666] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37d44322-91e7-4d74-88f1-6e1e1147eef3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.295046] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.299901] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 949.299901] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521aee4d-e18d-d91d-8f3c-c6afa35b07d5" [ 949.299901] env[69927]: _type = "Task" [ 949.299901] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.304389] env[69927]: DEBUG oslo_concurrency.lockutils [req-4320f4fa-831b-4904-89b4-21f8b0af218f req-d7c4de74-8ee6-47ed-b44b-31ffc32ae0e0 service nova] Releasing lock "refresh_cache-44e81156-b0c7-4f68-9732-b39f41ebcd4b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.311302] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521aee4d-e18d-d91d-8f3c-c6afa35b07d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.325395] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096143, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.525456] env[69927]: INFO nova.network.neutron [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Port 7c2fe55b-b50d-414d-bc2e-984a899ad2e4 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 949.525811] env[69927]: DEBUG nova.network.neutron [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [{"id": "ef273cd9-400b-409d-b689-297040f2ca04", "address": "fa:16:3e:3c:b9:fd", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef273cd9-40", "ovs_interfaceid": "ef273cd9-400b-409d-b689-297040f2ca04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.562124] env[69927]: DEBUG oslo_concurrency.lockutils [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] Acquiring lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.576123] env[69927]: DEBUG nova.compute.manager [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Start destroying the instance on the hypervisor. 
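Editor's note on the instance_info_cache dumps above (port ef273cd9-400b-409d-b689-297040f2ca04 on instance ff227e07-8e36-48d6-a8c7-1e0087fd1faa): addresses are nested as network -> subnets -> ips -> floating_ips. The walk below pulls fixed/floating pairs out of such an entry; the sample dict is trimmed to the fields the walk touches, with values taken from the cache dump, and the function is illustrative rather than nova's network model code.

    # Walk a network_info entry like the ones dumped above and collect
    # (fixed_address, [floating_addresses]) pairs.
    def collect_addresses(vif):
        pairs = []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                pairs.append((ip["address"], floats))
        return pairs

    sample_vif = {
        "id": "ef273cd9-400b-409d-b689-297040f2ca04",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.6",
                    "floating_ips": [{"address": "10.180.180.252"}],
                }],
            }],
        },
    }

    print(collect_addresses(sample_vif))   # [('192.168.128.6', ['10.180.180.252'])]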
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 949.576123] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.577783] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46945371-03ba-4eee-98da-3bf4ece49184 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.590667] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.591029] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-590492e4-542c-45f4-920b-71a5e58f1d16 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.602218] env[69927]: DEBUG oslo_vmware.api [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 949.602218] env[69927]: value = "task-4096145" [ 949.602218] env[69927]: _type = "Task" [ 949.602218] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.618691] env[69927]: DEBUG oslo_vmware.api [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.774151] env[69927]: DEBUG nova.scheduler.client.report [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.794084] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096144, 'name': CopyVirtualDisk_Task} progress is 77%. 
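Editor's note on the "Inventory has not changed for provider 2f529b36-..." entries above: placement derives usable capacity per resource class as (total - reserved) * allocation_ratio. The short calculation below applies that formula to the logged numbers, so, for example, 48 physical VCPU at a 4.0 ratio schedule as 192.

    # Effective capacity per resource class, using the placement formula
    # capacity = (total - reserved) * allocation_ratio and the numbers logged
    # for provider 2f529b36-df5f-4b37-8103-68f74f737726.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0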
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.814452] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521aee4d-e18d-d91d-8f3c-c6afa35b07d5, 'name': SearchDatastore_Task, 'duration_secs': 0.01202} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.814452] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-722b66cc-d4a0-480e-a55e-31be2049c52a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.826308] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 949.826308] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d1be10-1660-159d-b377-784475a424b9" [ 949.826308] env[69927]: _type = "Task" [ 949.826308] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.830169] env[69927]: DEBUG oslo_vmware.api [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096143, 'name': PowerOnVM_Task, 'duration_secs': 0.517833} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.833797] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.834081] env[69927]: INFO nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Took 8.68 seconds to spawn the instance on the hypervisor. [ 949.834699] env[69927]: DEBUG nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 949.835130] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1376d9-3d95-4fc5-85eb-c65b921aa9fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.845501] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d1be10-1660-159d-b377-784475a424b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.031299] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.112942] env[69927]: DEBUG oslo_vmware.api [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096145, 'name': PowerOffVM_Task, 'duration_secs': 0.32198} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.113315] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.113502] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.113778] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dcb77479-98d8-4bd9-8aa1-969e01d10b69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.208408] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.208653] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.208890] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleting the datastore file [datastore2] ff227e07-8e36-48d6-a8c7-1e0087fd1faa {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.209277] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dae5a5d3-3bfe-4e4a-9ab0-dcc4d12f5ed8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.218778] env[69927]: DEBUG oslo_vmware.api [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 950.218778] env[69927]: value = "task-4096147" [ 950.218778] 
env[69927]: _type = "Task" [ 950.218778] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.231793] env[69927]: DEBUG oslo_vmware.api [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.285576] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.292076] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.744s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.293089] env[69927]: INFO nova.compute.claims [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.297205] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530505} completed successfully. 
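Editor's note on the lockutils entries above (e.g. "compute_resources" released after being held 2.157s, then acquired by instance_claim after waiting 45.744s): claims and usage updates are serialised behind one named lock, which is why the log can report wait and hold times per caller. Below is a minimal sketch of that pattern with oslo_concurrency's synchronized decorator; the function names and bodies are placeholders, not nova's resource tracker code.

    from oslo_concurrency import lockutils

    # Minimal sketch of the serialisation pattern behind the "compute_resources"
    # lock messages above: every claim/usage update runs under one named lock.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        pass   # placeholder: reserve CPU/RAM/disk for the instance

    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        pass   # placeholder: fold the instance back into tracked usage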
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.297628] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.297838] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.298104] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbc92db1-d975-491a-9df4-140fb3f6a000 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.306363] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 950.306363] env[69927]: value = "task-4096148" [ 950.306363] env[69927]: _type = "Task" [ 950.306363] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.322868] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096148, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.324044] env[69927]: INFO nova.scheduler.client.report [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Deleted allocations for instance f6972b90-7746-4a37-8be8-1739f96dc3dc [ 950.342998] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d1be10-1660-159d-b377-784475a424b9, 'name': SearchDatastore_Task, 'duration_secs': 0.017174} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.342998] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.342998] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 44e81156-b0c7-4f68-9732-b39f41ebcd4b/44e81156-b0c7-4f68-9732-b39f41ebcd4b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 950.342998] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dad9ed11-fae7-4aad-994a-3c088880b5c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.355561] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 950.355561] env[69927]: value = "task-4096149" [ 950.355561] env[69927]: _type = "Task" [ 950.355561] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.362863] env[69927]: INFO nova.compute.manager [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Took 52.49 seconds to build instance. [ 950.374273] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096149, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.541236] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a5fe15d3-07e3-4c6c-8d84-c0c4ba9ca047 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-ff227e07-8e36-48d6-a8c7-1e0087fd1faa-7c2fe55b-b50d-414d-bc2e-984a899ad2e4" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.427s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.733436] env[69927]: DEBUG oslo_vmware.api [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26431} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.733727] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.733918] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.734149] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.734336] env[69927]: INFO nova.compute.manager [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Took 1.16 seconds to destroy the instance on the hypervisor. [ 950.734589] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.734789] env[69927]: DEBUG nova.compute.manager [-] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 950.734886] env[69927]: DEBUG nova.network.neutron [-] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 950.820210] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096148, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086851} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.820597] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.821511] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0481883c-f70a-425b-83a9-ea694fba8180 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.832213] env[69927]: DEBUG oslo_concurrency.lockutils [None req-79d24ada-57d5-47b4-8461-c9b1d38e6fe1 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "f6972b90-7746-4a37-8be8-1739f96dc3dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.931s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.853490] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.853795] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-207fbf86-310a-4732-a638-7243439b1de2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.879026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b376eea2-4019-429c-bead-04bfb5afe9d4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.766s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.893844] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096149, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.895399] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 950.895399] env[69927]: value = "task-4096150" [ 950.895399] env[69927]: _type = "Task" [ 950.895399] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.909193] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096150, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.388725] env[69927]: DEBUG nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 951.413611] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096149, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565071} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.414074] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 44e81156-b0c7-4f68-9732-b39f41ebcd4b/44e81156-b0c7-4f68-9732-b39f41ebcd4b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 951.414429] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 951.415183] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3aa259b-0eb1-4fa3-96a6-26100db66a56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.421301] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096150, 'name': ReconfigVM_Task, 'duration_secs': 0.367387} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.421994] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.428183] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80f2543e-23cf-43aa-895b-ff188dfed859 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.432505] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 951.432505] env[69927]: value = "task-4096151" [ 951.432505] env[69927]: _type = "Task" [ 951.432505] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.440431] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 951.440431] env[69927]: value = "task-4096152" [ 951.440431] env[69927]: _type = "Task" [ 951.440431] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.446372] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096151, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.460394] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096152, 'name': Rename_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.862776] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72682c36-a174-433b-9a29-c3987e2058b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.875871] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c168503d-fdd7-4f75-b02c-3bbd39bc5991 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.917571] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48807364-ff2f-4f01-a680-6886c976f8d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.927573] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673e42f6-a82d-49ed-9dc5-0d71c0766894 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.935196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.948539] env[69927]: DEBUG nova.compute.provider_tree [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 951.960816] env[69927]: DEBUG oslo_vmware.api 
[None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096152, 'name': Rename_Task, 'duration_secs': 0.155219} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.964413] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.965117] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081398} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.965383] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-607fb79d-091d-4cc9-aa9c-758fb83cf5b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.967243] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.969208] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bdedc7-9bfa-4a98-9547-33990c0f7bb3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.006293] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 44e81156-b0c7-4f68-9732-b39f41ebcd4b/44e81156-b0c7-4f68-9732-b39f41ebcd4b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.006293] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 952.006293] env[69927]: value = "task-4096153" [ 952.006293] env[69927]: _type = "Task" [ 952.006293] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.006293] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e02bc52f-9100-4b25-bcd9-58bbf638ea79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.037296] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096153, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.037296] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 952.037296] env[69927]: value = "task-4096154" [ 952.037296] env[69927]: _type = "Task" [ 952.037296] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.047695] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096154, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.421198] env[69927]: DEBUG nova.network.neutron [-] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.490469] env[69927]: ERROR nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [req-9beed3bb-cba0-4152-8a6e-8ec592352254] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9beed3bb-cba0-4152-8a6e-8ec592352254"}]} [ 952.519161] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 952.541950] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096153, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.543179] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 952.543413] env[69927]: DEBUG nova.compute.provider_tree [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 952.559149] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096154, 'name': ReconfigVM_Task, 'duration_secs': 0.392287} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.559442] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 44e81156-b0c7-4f68-9732-b39f41ebcd4b/44e81156-b0c7-4f68-9732-b39f41ebcd4b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.560108] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50e60d9b-20aa-4f3b-bdf9-141b0779b21b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.564543] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 952.569715] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 952.569715] env[69927]: value = "task-4096155" [ 952.569715] env[69927]: _type = "Task" [ 952.569715] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.582233] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096155, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.592971] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 952.622089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "93d19a66-f00e-4fa8-9eed-32035b020ba2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.622373] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.847377] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.847664] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.847857] env[69927]: INFO nova.compute.manager [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Shelving [ 952.926362] env[69927]: INFO nova.compute.manager [-] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Took 2.19 seconds to deallocate network for instance. [ 953.035094] env[69927]: DEBUG oslo_vmware.api [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096153, 'name': PowerOnVM_Task, 'duration_secs': 0.59815} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.035094] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.035094] env[69927]: INFO nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Took 5.05 seconds to spawn the instance on the hypervisor. [ 953.035094] env[69927]: DEBUG nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 953.035094] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92495367-90c2-4864-87d0-151dc9d4c10e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.080987] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096155, 'name': Rename_Task, 'duration_secs': 0.339921} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.084345] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 953.085011] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-466647ae-0506-4723-9d24-c51df124d9b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.092476] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 953.092476] env[69927]: value = "task-4096156" [ 953.092476] env[69927]: _type = "Task" [ 953.092476] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.105022] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096156, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.107116] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6849a996-04c0-4854-8afa-77daa6e588fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.118493] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d8a536-34fd-49ab-936b-0905f21a98c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.125586] env[69927]: DEBUG nova.compute.manager [req-b5089af5-ecbc-4f47-9dfc-fac41c76adc1 req-76d66dfa-8461-49ae-a1ba-59b846984be6 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Received event network-vif-deleted-ef273cd9-400b-409d-b689-297040f2ca04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.160904] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23efc022-7b84-4b75-9ccf-6a8fca3728e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.169861] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc2670a-fa2c-4a59-85ba-4be3fa3f6764 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.188040] env[69927]: DEBUG nova.compute.provider_tree [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.433911] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.554242] env[69927]: INFO nova.compute.manager [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Took 50.43 seconds to build instance. [ 953.605171] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096156, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.712141] env[69927]: ERROR nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [req-879ed2d2-d1f3-4761-a972-4c687714c13c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-879ed2d2-d1f3-4761-a972-4c687714c13c"}]} [ 953.731759] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 953.734059] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.734327] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.747353] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 953.748346] env[69927]: DEBUG nova.compute.provider_tree [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.761020] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 953.779765] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 953.862761] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.863332] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a70bedd5-7db4-4159-a9ac-6d7454b02123 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.871043] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 953.871043] env[69927]: value = "task-4096157" [ 953.871043] env[69927]: _type = "Task" [ 953.871043] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.881588] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.057508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eeac361d-f234-48f7-9846-8633686a7827 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "27e20d58-1150-4b90-b888-d84aff1954ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.515s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.108497] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096156, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.347624] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8dc3cb-e022-4ea3-b36a-10965eaa862e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.356852] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb147987-dfef-4e8e-b08f-24452313be2b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.398035] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a312b859-812f-4a68-a9cd-c5a54940d6a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.412360] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096157, 'name': PowerOffVM_Task, 'duration_secs': 0.262762} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.413714] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03ed323-c4f9-4afb-b8fa-d23233fd01fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.417779] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.418713] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2983575-170d-4ffa-b61c-985e57ed7d20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.438735] env[69927]: INFO nova.compute.manager [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Rebuilding instance [ 954.450466] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b71816-8c7a-4f75-afcc-86d0e1bc73a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.454628] env[69927]: DEBUG nova.compute.provider_tree [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 954.506164] env[69927]: DEBUG 
nova.compute.manager [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 954.507018] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ab691f-37be-468b-99f0-34ceb44b9d86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.560511] env[69927]: DEBUG nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 954.605169] env[69927]: DEBUG oslo_vmware.api [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096156, 'name': PowerOnVM_Task, 'duration_secs': 1.082423} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.605436] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 954.605636] env[69927]: INFO nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Took 9.45 seconds to spawn the instance on the hypervisor. 
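Editor's note: the vCenter task entries above repeat one pattern: wait_for_task logs the task moniker ("Waiting for the task: ... task-4096153 ... to complete"), _poll_task reports intermediate progress (66%, 88%), and a final entry records duration_secs once the task completes. The snippet below is a minimal sketch of that polling loop, not the oslo.vmware implementation; the get_task_info() callable and TaskFailed exception are hypothetical stand-ins for whatever the real session layer exposes.

```python
import time


class TaskFailed(Exception):
    """Hypothetical error raised when the backend reports a failed task."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a task reference until it finishes, mirroring the
    wait_for_task -> _poll_task -> "completed successfully" sequence
    seen in the log above.

    get_task_info: caller-supplied function returning a dict such as
        {'state': 'running', 'progress': 66} or {'state': 'success'}.
    """
    start = time.monotonic()
    print("Waiting for the task: %s to complete." % task_ref)
    while True:
        info = get_task_info(task_ref)          # one _poll_task round trip
        state = info.get("state")
        if state == "running":
            print("Task: %s progress is %s%%."
                  % (task_ref, info.get("progress", 0)))
        elif state == "success":
            duration = time.monotonic() - start
            print("Task: %s completed successfully in %.3fs."
                  % (task_ref, duration))
            return info
        else:
            raise TaskFailed("Task %s ended in state %r" % (task_ref, state))
        time.sleep(poll_interval)               # back off before the next poll
```

In the log, task-4096153 (PowerOnVM_Task) passes through 66% and 88% before the final entry records duration_secs of 0.59815, which is exactly the shape this loop produces.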
[ 954.605819] env[69927]: DEBUG nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 954.606666] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5e906d-b15d-4a49-a2ee-341191debeea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.760262] env[69927]: DEBUG nova.compute.manager [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 954.761681] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b63f278-1db5-43df-8cbd-57eafa517a66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.966113] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 954.967145] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d35bea16-d8a3-4b6a-a230-61c618ee4d83 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.977189] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 954.977189] env[69927]: value = "task-4096158" [ 954.977189] env[69927]: _type = "Task" [ 954.977189] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.987864] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096158, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.991369] env[69927]: DEBUG nova.scheduler.client.report [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 954.991635] env[69927]: DEBUG nova.compute.provider_tree [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 99 to 100 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 954.991999] env[69927]: DEBUG nova.compute.provider_tree [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.084446] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.124690] env[69927]: INFO nova.compute.manager [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Took 56.79 seconds to build instance. 
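Editor's note: the repeated 409 responses above ("resource provider generation conflict", code placement.concurrent_update), the "Refreshing inventories" lines that follow them, and the eventual jump of provider 2f529b36-df5f-4b37-8103-68f74f737726 from generation 99 to 100 illustrate Placement's optimistic concurrency control: every inventory PUT carries the provider generation the writer last saw, a stale generation is rejected, and the writer re-reads and retries. The sketch below shows only that retry shape under stated assumptions; fetch_provider_generation() and put_inventories() are hypothetical placeholders, not the actual nova.scheduler.client.report code.

```python
# Hedged sketch of the generation-conflict retry visible in the log.
# fetch_provider_generation() and put_inventories() are hypothetical
# helpers standing in for the real Placement client calls.


class PlacementConflict(Exception):
    """Hypothetical exception for an HTTP 409 placement.concurrent_update."""


def update_inventory_with_retry(provider_uuid, inventory,
                                fetch_provider_generation, put_inventories,
                                max_attempts=4):
    for _ in range(max_attempts):
        # Re-read the provider so the PUT carries the generation we last saw.
        generation = fetch_provider_generation(provider_uuid)
        try:
            # On success Placement bumps the generation (e.g. 99 -> 100 above).
            return put_inventories(provider_uuid, generation, inventory)
        except PlacementConflict:
            # Another writer updated the provider first; refresh and retry,
            # matching the "Failed to update inventory ... Got 409" /
            # "Refreshing inventories" cycle in the log.
            continue
    raise RuntimeError("Gave up updating inventory for %s after %d attempts"
                       % (provider_uuid, max_attempts))
```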
[ 955.275514] env[69927]: INFO nova.compute.manager [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] instance snapshotting [ 955.279508] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdb7c29-db8a-4b35-98d0-3f155e407ca8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.300376] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d556efb-0fd6-4961-b7c0-08f04c0eedb0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.488143] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096158, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.498248] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.207s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.498795] env[69927]: DEBUG nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 955.501334] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.730s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.501559] env[69927]: DEBUG nova.objects.instance [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lazy-loading 'resources' on Instance uuid 8442f144-2be4-4634-b151-62f049a975b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.523357] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.523542] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a852d9a6-afbd-4438-88f9-e45a0d621daa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.532166] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 955.532166] env[69927]: value = "task-4096159" [ 955.532166] env[69927]: _type = "Task" [ 955.532166] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.541318] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096159, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.628833] env[69927]: DEBUG oslo_concurrency.lockutils [None req-db4b51d2-6987-4ee8-b489-1abc09eda9cc tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.853s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.812263] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 955.812657] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cb824c55-14f5-4fb7-8173-5d6d926b98bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.821033] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 955.821033] env[69927]: value = "task-4096160" [ 955.821033] env[69927]: _type = "Task" [ 955.821033] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.830153] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096160, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.989660] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096158, 'name': CreateSnapshot_Task, 'duration_secs': 0.635551} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.990107] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 955.991307] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebb9b10-74ec-459a-84b2-30e42993cc03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.007649] env[69927]: DEBUG nova.compute.utils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 956.014542] env[69927]: DEBUG nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 956.014942] env[69927]: DEBUG nova.network.neutron [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.046726] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096159, 'name': PowerOffVM_Task, 'duration_secs': 0.407133} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.047198] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.047339] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.048046] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c761aa26-d30b-4d1f-bde8-9a138931a87d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.056837] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.057470] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5e82aa8-ff0b-439e-a637-0713ea29dbca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.078631] env[69927]: DEBUG nova.policy [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de8b1b11969a4feb818dc682d2fec552', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61b1aea0ccf049c8942ba32932412497', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 956.085287] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.085519] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.085697] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Deleting the datastore file [datastore1] 27e20d58-1150-4b90-b888-d84aff1954ef {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.085994] env[69927]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecdbad97-06e5-405a-8edb-c9446a3156e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.092319] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 956.092319] env[69927]: value = "task-4096162" [ 956.092319] env[69927]: _type = "Task" [ 956.092319] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.101738] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.131128] env[69927]: DEBUG nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 956.335560] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096160, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.512443] env[69927]: DEBUG nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 956.522362] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 956.522963] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-60e39c2c-baf8-4cd7-9413-c47781fe771f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.536646] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 956.536646] env[69927]: value = "task-4096163" [ 956.536646] env[69927]: _type = "Task" [ 956.536646] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.548507] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096163, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.556501] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4083f052-9d18-4684-9623-4bec1c4b4a96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.559979] env[69927]: DEBUG nova.network.neutron [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Successfully created port: fc4d69e0-0a53-4c34-8f56-6416a884b018 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.568761] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628b7e6f-c515-42dc-a799-f916e4e464ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.605863] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b224a9e8-4817-4cb0-9cae-f2c11f91f55a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.419788] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096722} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.431958] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27156c66-492a-4506-a9e1-bafd58a3aaf2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.437511] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.437511] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.437511] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.459082] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096160, 'name': CreateSnapshot_Task, 'duration_secs': 0.961848} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.459082] env[69927]: DEBUG nova.compute.provider_tree [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.460537] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096163, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.462149] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 957.462385] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b531bf-c0b3-4ef5-874e-167eda9891fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.477106] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.927031] env[69927]: DEBUG nova.compute.manager [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 957.927031] env[69927]: DEBUG nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 957.931092] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e14aa75-9db3-41a6-9839-fc0ac92e0d4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.946275] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096163, 'name': CloneVM_Task, 'duration_secs': 1.38849} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.950208] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Created linked-clone VM from snapshot [ 957.952344] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6c6b57-871f-4be3-b506-6bf7e3400fdd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.960177] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Uploading image cb041012-44ce-40ce-ba24-60376d2f1762 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 957.963206] env[69927]: DEBUG nova.scheduler.client.report [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.984902] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 957.986754] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d344f076-1c15-4cc4-8ccf-3c3be493fa65 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.996992] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 957.996992] env[69927]: value = "task-4096164" [ 957.996992] env[69927]: _type = "Task" [ 957.996992] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.007074] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096164, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.030007] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.030286] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.030459] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.030645] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.030792] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.030938] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.031174] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.031336] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.031505] env[69927]: 
DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.031666] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.031837] env[69927]: DEBUG nova.virt.hardware [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.033866] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1f9551-02a9-47ae-b83e-cf60135c463d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.042892] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 958.042892] env[69927]: value = "vm-811475" [ 958.042892] env[69927]: _type = "VirtualMachine" [ 958.042892] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 958.044211] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec76e58-52b8-4bab-baef-de88ddf42518 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.048828] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7fe648a3-b230-4085-aad8-2e14e2c28dfd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.064189] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease: (returnval){ [ 958.064189] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529126d0-9038-3207-674c-6eaddf51580e" [ 958.064189] env[69927]: _type = "HttpNfcLease" [ 958.064189] env[69927]: } obtained for exporting VM: (result){ [ 958.064189] env[69927]: value = "vm-811475" [ 958.064189] env[69927]: _type = "VirtualMachine" [ 958.064189] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 958.064695] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the lease: (returnval){ [ 958.064695] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529126d0-9038-3207-674c-6eaddf51580e" [ 958.064695] env[69927]: _type = "HttpNfcLease" [ 958.064695] env[69927]: } to be ready. 
{{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 958.071706] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 958.071706] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529126d0-9038-3207-674c-6eaddf51580e" [ 958.071706] env[69927]: _type = "HttpNfcLease" [ 958.071706] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 958.072049] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 958.072049] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529126d0-9038-3207-674c-6eaddf51580e" [ 958.072049] env[69927]: _type = "HttpNfcLease" [ 958.072049] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 958.074019] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c503fa-426a-4a92-af3b-73a59a2ca421 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.081490] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ddd5e-3c97-452e-55f3-fae637140715/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 958.081717] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ddd5e-3c97-452e-55f3-fae637140715/disk-0.vmdk for reading. 
{{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 958.180865] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4b27dfe1-c7a6-4e98-aa31-bb392f70d4d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.456648] env[69927]: INFO nova.compute.manager [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] instance snapshotting [ 958.463750] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606ac26d-db11-4f06-9cc2-cd2233d68e66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.469202] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.968s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.494758] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 50.877s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.495067] env[69927]: DEBUG nova.objects.instance [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 958.499759] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75235452-49de-44e6-92bb-bd6f73dcbac1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.507328] env[69927]: INFO nova.scheduler.client.report [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Deleted allocations for instance 8442f144-2be4-4634-b151-62f049a975b6 [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.518048] env[69927]: DEBUG nova.virt.hardware [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 
tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.522590] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b116942-5d96-4a2c-9de5-4cd9109cf587 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.525368] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096164, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.534272] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a2a0ac-2b21-4b87-ab5c-1afb79525617 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.550031] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.557547] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.558797] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.559189] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ad6913f-7432-480c-b25a-3a4c3b2b2355 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.583657] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.583657] env[69927]: value = "task-4096166" [ 958.583657] env[69927]: _type = "Task" [ 958.583657] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.593554] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096166, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.596083] env[69927]: DEBUG nova.compute.manager [req-6c6fd2d2-30c2-4894-a341-bc5e2f083360 req-3bdb964b-0c2a-4eaa-a83d-11a35f978951 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Received event network-vif-plugged-fc4d69e0-0a53-4c34-8f56-6416a884b018 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.596368] env[69927]: DEBUG oslo_concurrency.lockutils [req-6c6fd2d2-30c2-4894-a341-bc5e2f083360 req-3bdb964b-0c2a-4eaa-a83d-11a35f978951 service nova] Acquiring lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.596672] env[69927]: DEBUG oslo_concurrency.lockutils [req-6c6fd2d2-30c2-4894-a341-bc5e2f083360 req-3bdb964b-0c2a-4eaa-a83d-11a35f978951 service nova] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.596928] env[69927]: DEBUG oslo_concurrency.lockutils [req-6c6fd2d2-30c2-4894-a341-bc5e2f083360 req-3bdb964b-0c2a-4eaa-a83d-11a35f978951 service nova] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.597303] env[69927]: DEBUG nova.compute.manager [req-6c6fd2d2-30c2-4894-a341-bc5e2f083360 req-3bdb964b-0c2a-4eaa-a83d-11a35f978951 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] No waiting events found dispatching network-vif-plugged-fc4d69e0-0a53-4c34-8f56-6416a884b018 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 958.597593] env[69927]: WARNING nova.compute.manager [req-6c6fd2d2-30c2-4894-a341-bc5e2f083360 req-3bdb964b-0c2a-4eaa-a83d-11a35f978951 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Received unexpected event network-vif-plugged-fc4d69e0-0a53-4c34-8f56-6416a884b018 for instance with vm_state building and task_state spawning. [ 958.658662] env[69927]: DEBUG nova.network.neutron [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Successfully updated port: fc4d69e0-0a53-4c34-8f56-6416a884b018 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 959.017887] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096164, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.034807] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 959.035947] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f039d20d-dca6-408c-a5f2-cbd7c8654716 tempest-VolumesAssistedSnapshotsTest-131898001 tempest-VolumesAssistedSnapshotsTest-131898001-project-member] Lock "8442f144-2be4-4634-b151-62f049a975b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.266s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.037016] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f3b4ced5-21f8-4e3f-99cd-b0f766c38c05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.049491] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 959.049491] env[69927]: value = "task-4096167" [ 959.049491] env[69927]: _type = "Task" [ 959.049491] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.060511] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096167, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.097860] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096166, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.163836] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.164015] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.164181] env[69927]: DEBUG nova.network.neutron [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.516338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67cc2c33-2243-4dac-a82b-bb07c7326417 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.517535] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096164, 'name': CloneVM_Task, 'duration_secs': 1.487682} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.517804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.626s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.518120] env[69927]: DEBUG nova.objects.instance [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lazy-loading 'resources' on Instance uuid 07484a6c-f9d1-405b-9ae4-a1b830f474ed {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.519808] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Created linked-clone VM from snapshot [ 959.521019] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab972bb-c884-4e37-a5e0-aaf3fb6e4d08 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.530688] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Uploading image 3627a0ef-58b1-4de0-bbcb-ab8ee9879d36 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 959.546227] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 959.546227] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b4e82148-2a18-4161-9edf-55e2f4ca0479 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.556754] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 959.556754] env[69927]: value = "task-4096168" [ 959.556754] env[69927]: _type = "Task" [ 959.556754] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.564411] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096167, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.574775] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096168, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.597795] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096166, 'name': CreateVM_Task, 'duration_secs': 0.704517} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.598017] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 959.598501] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.598713] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.599239] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 959.599553] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea9faba2-a2c6-4b8c-88a9-aa0ba4b67553 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.606208] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 959.606208] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527e8ffd-ec9d-6ef2-110f-3f812dee33f9" [ 959.606208] env[69927]: _type = "Task" [ 959.606208] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.625359] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527e8ffd-ec9d-6ef2-110f-3f812dee33f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.736366] env[69927]: DEBUG nova.network.neutron [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.955245] env[69927]: DEBUG nova.network.neutron [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance_info_cache with network_info: [{"id": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "address": "fa:16:3e:f8:cb:f0", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc4d69e0-0a", "ovs_interfaceid": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.061694] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096167, 'name': CreateSnapshot_Task, 'duration_secs': 0.967513} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.068752] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 960.070661] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df8067a-b277-4f88-9a8f-cb1a25dbce75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.079571] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096168, 'name': Destroy_Task} progress is 33%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.124166] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527e8ffd-ec9d-6ef2-110f-3f812dee33f9, 'name': SearchDatastore_Task, 'duration_secs': 0.016236} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.125916] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.126255] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.126562] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.126735] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.126961] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.127567] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce13ab73-e9c8-4c7b-b3e7-9bfa9df77e01 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.139402] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.139659] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.144499] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15b8e00f-6628-4a94-8658-64b1126870d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.153752] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 960.153752] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522307fb-d652-2dff-a86d-25b73c59f75d" [ 960.153752] env[69927]: _type = "Task" [ 960.153752] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.166140] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522307fb-d652-2dff-a86d-25b73c59f75d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.458908] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.459269] env[69927]: DEBUG nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Instance network_info: |[{"id": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "address": "fa:16:3e:f8:cb:f0", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc4d69e0-0a", "ovs_interfaceid": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 960.462669] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:f8:cb:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc4d69e0-0a53-4c34-8f56-6416a884b018', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.473628] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 960.474337] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.474713] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de7fdd11-a7a2-4d98-ad6a-a65b3ca6b914 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.503181] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.503181] env[69927]: value = "task-4096169" [ 960.503181] env[69927]: _type = "Task" [ 960.503181] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.515488] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096169, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.578486] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096168, 'name': Destroy_Task, 'duration_secs': 0.603877} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.580712] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Destroyed the VM [ 960.581699] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 960.581699] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ea74898e-cde4-4ec0-b510-2258b9cf1f88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.597331] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 960.602656] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3e3975d6-0f94-493c-ab33-8652acd0e3e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.606649] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 960.606649] env[69927]: value = "task-4096170" [ 960.606649] env[69927]: _type = "Task" [ 960.606649] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.613654] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 960.613654] env[69927]: value = "task-4096171" [ 960.613654] env[69927]: _type = "Task" [ 960.613654] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.621202] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096170, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.631042] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096171, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.668781] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522307fb-d652-2dff-a86d-25b73c59f75d, 'name': SearchDatastore_Task, 'duration_secs': 0.015315} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.672730] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1695eb8c-7eaf-4c53-b94f-394ebbcd2eea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.681420] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 960.681420] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5228c7d9-214d-05f8-b1d8-c75de82587fd" [ 960.681420] env[69927]: _type = "Task" [ 960.681420] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.685231] env[69927]: DEBUG nova.compute.manager [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Received event network-changed-fc4d69e0-0a53-4c34-8f56-6416a884b018 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.685701] env[69927]: DEBUG nova.compute.manager [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Refreshing instance network info cache due to event network-changed-fc4d69e0-0a53-4c34-8f56-6416a884b018. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 960.685972] env[69927]: DEBUG oslo_concurrency.lockutils [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] Acquiring lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.686310] env[69927]: DEBUG oslo_concurrency.lockutils [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] Acquired lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.686533] env[69927]: DEBUG nova.network.neutron [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Refreshing network info cache for port fc4d69e0-0a53-4c34-8f56-6416a884b018 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 960.696772] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac64c23e-d65a-4497-8473-0106be551a01 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.717864] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd34eb1-451e-424a-9fda-d0d18e548abe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.727682] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5228c7d9-214d-05f8-b1d8-c75de82587fd, 'name': SearchDatastore_Task, 'duration_secs': 0.013778} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.727682] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.727682] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.727682] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb05d4af-36e9-42c4-b4c0-b5b6b2113f47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.763034] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f7b06f-2420-47c1-9a3e-879ff2b0c2ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.769154] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 960.769154] env[69927]: value = "task-4096172" [ 960.769154] env[69927]: _type = "Task" [ 960.769154] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.779839] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3330996-5597-40de-8316-cae6570b2ffe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.792561] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096172, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.803324] env[69927]: DEBUG nova.compute.provider_tree [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.015923] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096169, 'name': CreateVM_Task, 'duration_secs': 0.451034} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.016174] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.017100] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.017362] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.017783] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.017907] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2e98929-eed2-4165-bcc4-dd8d40b3ae00 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.024168] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 961.024168] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52804d81-e39f-50ec-97eb-8ee1674d4f05" [ 961.024168] env[69927]: _type = "Task" [ 961.024168] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.034615] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52804d81-e39f-50ec-97eb-8ee1674d4f05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.124045] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096170, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.131862] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096171, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.283870] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096172, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.310160] env[69927]: DEBUG nova.scheduler.client.report [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.396558] env[69927]: DEBUG nova.network.neutron [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updated VIF entry in instance network info cache for port fc4d69e0-0a53-4c34-8f56-6416a884b018. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 961.397029] env[69927]: DEBUG nova.network.neutron [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance_info_cache with network_info: [{"id": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "address": "fa:16:3e:f8:cb:f0", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc4d69e0-0a", "ovs_interfaceid": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.537811] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52804d81-e39f-50ec-97eb-8ee1674d4f05, 'name': SearchDatastore_Task, 'duration_secs': 0.063006} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.537811] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.537811] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.538238] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.538510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.538760] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.539623] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ebcfbfd-a445-439e-bcb1-203e5857f9c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.551638] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.552054] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.552983] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a2accbb-4af3-4fc7-a824-889d17020fa9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.560994] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 961.560994] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520bd860-f2f5-65c0-df55-dae4aa3c12d9" [ 961.560994] env[69927]: _type = "Task" [ 961.560994] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.571045] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520bd860-f2f5-65c0-df55-dae4aa3c12d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.618609] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096170, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.633123] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096171, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.783472] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096172, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611925} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.783747] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.783982] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.784284] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c37bac7-6f50-4828-a45e-08458573f379 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.793101] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 961.793101] env[69927]: value = "task-4096173" [ 961.793101] env[69927]: _type = "Task" [ 961.793101] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.803414] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096173, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.816643] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.299s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.819199] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.957s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.819462] env[69927]: DEBUG nova.objects.instance [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lazy-loading 'resources' on Instance uuid aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.850391] env[69927]: INFO nova.scheduler.client.report [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Deleted allocations for instance 07484a6c-f9d1-405b-9ae4-a1b830f474ed [ 961.900337] env[69927]: DEBUG oslo_concurrency.lockutils [req-b9ab41c8-4a4d-4011-bad3-87ef5510ed81 req-138b4a6d-dace-4b37-ad74-06f2e25cc077 service nova] Releasing lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.072351] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520bd860-f2f5-65c0-df55-dae4aa3c12d9, 'name': SearchDatastore_Task, 'duration_secs': 0.017828} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.073365] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7882f21f-c090-4cc6-9cbc-5ac488a77b66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.081189] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 962.081189] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5267a2ef-5191-e4c6-3029-0f1fb7083307" [ 962.081189] env[69927]: _type = "Task" [ 962.081189] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.093466] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5267a2ef-5191-e4c6-3029-0f1fb7083307, 'name': SearchDatastore_Task, 'duration_secs': 0.01081} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.093779] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.094051] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 5581f8af-9796-48ad-a2f3-557e90d9662a/5581f8af-9796-48ad-a2f3-557e90d9662a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.094412] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32776395-ff07-4f21-b747-2a26311bd5ce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.102215] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 962.102215] env[69927]: value = "task-4096174" [ 962.102215] env[69927]: _type = "Task" [ 962.102215] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.116614] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.124864] env[69927]: DEBUG oslo_vmware.api [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096170, 'name': RemoveSnapshot_Task, 'duration_secs': 1.130178} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.125569] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 962.131371] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096171, 'name': CloneVM_Task, 'duration_secs': 1.456431} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.131843] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Created linked-clone VM from snapshot [ 962.133501] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfed0c6c-465c-47d6-9839-610e5da813aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.149229] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Uploading image c87f5a71-3793-40fd-ad59-7b537237d25e {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 962.194335] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 962.194335] env[69927]: value = "vm-811481" [ 962.194335] env[69927]: _type = "VirtualMachine" [ 962.194335] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 962.195326] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-42c3ce64-f5d0-4794-a91c-5eaea0b3d677 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.206342] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease: (returnval){ [ 962.206342] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5255f0e4-eb2c-711e-7f70-6921d529c1c7" [ 962.206342] env[69927]: _type = "HttpNfcLease" [ 962.206342] env[69927]: } obtained for exporting VM: (result){ [ 962.206342] env[69927]: value = "vm-811481" [ 962.206342] env[69927]: _type = "VirtualMachine" [ 962.206342] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 962.206796] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the lease: (returnval){ [ 962.206796] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5255f0e4-eb2c-711e-7f70-6921d529c1c7" [ 962.206796] env[69927]: _type = "HttpNfcLease" [ 962.206796] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 962.218531] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.218531] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5255f0e4-eb2c-711e-7f70-6921d529c1c7" [ 962.218531] env[69927]: _type = "HttpNfcLease" [ 962.218531] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 962.306026] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096173, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074924} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.306220] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.307189] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f370c9c9-5542-4844-966d-ff87b7e09151 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.334386] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.335723] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b9510c4-f837-4b49-b474-8d7369048d57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.359315] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 962.359315] env[69927]: value = "task-4096176" [ 962.359315] env[69927]: _type = "Task" [ 962.359315] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.359925] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5c2e27f-7947-4a74-bd23-32983dcdd92c tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "07484a6c-f9d1-405b-9ae4-a1b830f474ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.114s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.375283] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096176, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.615064] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096174, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.636644] env[69927]: WARNING nova.compute.manager [None req-59f5135b-eacb-4a00-ad36-a09da0be9e28 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Image not found during snapshot: nova.exception.ImageNotFound: Image 3627a0ef-58b1-4de0-bbcb-ab8ee9879d36 could not be found. [ 962.720208] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.720208] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5255f0e4-eb2c-711e-7f70-6921d529c1c7" [ 962.720208] env[69927]: _type = "HttpNfcLease" [ 962.720208] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 962.720208] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 962.720208] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5255f0e4-eb2c-711e-7f70-6921d529c1c7" [ 962.720208] env[69927]: _type = "HttpNfcLease" [ 962.720208] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 962.720208] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f924b3-5e71-4a68-baf9-b6e9e630f81f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.729399] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527eeff3-6c0a-fdae-802f-14d51644bb7d/disk-0.vmdk from lease info. 
{{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 962.730264] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527eeff3-6c0a-fdae-802f-14d51644bb7d/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 962.856411] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-da23679b-454c-4161-8524-1b093164d5e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.875180] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096176, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.053654] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ad24d3-ed2d-42a2-88cc-8501926e564d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.061861] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8abf28-d84c-43a2-a62b-48414c0f1e56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.095974] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e656492-d0e7-44ef-a725-e0061e4a69db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.109092] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfc0d5e-ac23-4156-b43b-87ac3d6e7319 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.128478] env[69927]: DEBUG nova.compute.provider_tree [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 963.134231] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63691} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.135148] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 5581f8af-9796-48ad-a2f3-557e90d9662a/5581f8af-9796-48ad-a2f3-557e90d9662a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 963.135374] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 963.135709] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2160613-20d6-404a-8a0f-921d5af423d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.144762] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 963.144762] env[69927]: value = "task-4096178" [ 963.144762] env[69927]: _type = "Task" [ 963.144762] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.155255] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096178, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.378120] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096176, 'name': ReconfigVM_Task, 'duration_secs': 0.643151} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.379000] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 27e20d58-1150-4b90-b888-d84aff1954ef/27e20d58-1150-4b90-b888-d84aff1954ef.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.379411] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7221fa23-d9dc-4cab-95ad-efb9c22af77e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.390259] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 963.390259] env[69927]: value = "task-4096179" [ 963.390259] env[69927]: _type = "Task" [ 963.390259] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.403822] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096179, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.658040] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096178, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079212} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.658595] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.659375] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded86a9c-ceaf-4bcc-bd90-6cef6bb9cc0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.689746] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 5581f8af-9796-48ad-a2f3-557e90d9662a/5581f8af-9796-48ad-a2f3-557e90d9662a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.691057] env[69927]: DEBUG nova.scheduler.client.report [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 963.691440] env[69927]: DEBUG nova.compute.provider_tree [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 100 to 101 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 963.691660] env[69927]: DEBUG nova.compute.provider_tree [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 963.695435] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3155ba88-7f2a-4602-bdff-8144fff86af5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.718730] env[69927]: DEBUG oslo_vmware.api [None 
req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 963.718730] env[69927]: value = "task-4096180" [ 963.718730] env[69927]: _type = "Task" [ 963.718730] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.731806] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096180, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.905125] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096179, 'name': Rename_Task, 'duration_secs': 0.230779} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.905719] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.906582] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6d5b897-1d2a-46b3-a15e-84160f71a9da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.916521] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 963.916521] env[69927]: value = "task-4096181" [ 963.916521] env[69927]: _type = "Task" [ 963.916521] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.933608] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096181, 'name': PowerOnVM_Task} progress is 0%. 
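The set_inventory_for_provider update logged a little earlier (provider 2f529b36-df5f-4b37-8103-68f74f737726, generation 100 -> 101) corresponds to a single PUT against the Placement API. A rough sketch of the equivalent request, reusing the inventory values from the log; the endpoint, token and microversion value are placeholders, and Nova's report client actually goes through keystoneauth rather than raw requests:

import requests

PLACEMENT = 'http://placement.example.test'           # placeholder endpoint
RP_UUID = '2f529b36-df5f-4b37-8103-68f74f737726'       # provider from the log

payload = {
    # Placement rejects the write if this generation is stale, which is how
    # concurrent updates to the same provider are detected (100 -> 101 above).
    'resource_provider_generation': 100,
    'inventories': {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 1.0},
    },
}

resp = requests.put(
    f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories',
    json=payload,
    headers={'X-Auth-Token': '<token>',                      # placeholder
             'OpenStack-API-Version': 'placement 1.26'})     # illustrative microversion
resp.raise_for_status()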
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.009233] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.009658] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.009971] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.010288] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.010531] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.014944] env[69927]: INFO nova.compute.manager [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Terminating instance [ 964.215131] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.394s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.216659] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 53.270s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.237443] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096180, 'name': ReconfigVM_Task, 'duration_secs': 0.392333} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.238130] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 5581f8af-9796-48ad-a2f3-557e90d9662a/5581f8af-9796-48ad-a2f3-557e90d9662a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.239133] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fb777a9-9e6c-47d4-b5a4-9c126bd2adc1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.249036] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 964.249036] env[69927]: value = "task-4096182" [ 964.249036] env[69927]: _type = "Task" [ 964.249036] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.259470] env[69927]: INFO nova.scheduler.client.report [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Deleted allocations for instance aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4 [ 964.267192] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096182, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.433458] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096181, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.522732] env[69927]: DEBUG nova.compute.manager [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 964.522732] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.523639] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f126e9ac-0f68-4dbe-b1b5-1733cd48cbb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.532909] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.533280] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81bcac6e-b7ab-4fd6-8d0e-5a4d2b4ce200 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.541093] env[69927]: DEBUG oslo_vmware.api [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 964.541093] env[69927]: value = "task-4096183" [ 964.541093] env[69927]: _type = "Task" [ 964.541093] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.550361] env[69927]: DEBUG oslo_vmware.api [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.766331] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096182, 'name': Rename_Task, 'duration_secs': 0.17637} completed successfully. 
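The recurring "Acquiring lock ... by ...", "Lock ... acquired ... waited N s" and "... 'released' ... held N s" triplets throughout this section (for example around do_terminate_instance and the compute_resources resource-tracker updates) are emitted by oslo.concurrency's lock helpers. A minimal sketch of that usage pattern; the function names here are illustrative, not Nova's actual methods:

from oslo_concurrency import lockutils

# Decorator form: serialise all callers on the named lock. The DEBUG lines
# above report how long each caller waited for and then held the lock.
@lockutils.synchronized('compute_resources')
def update_usage_example():
    pass  # critical section


# The same helper is also usable as a context manager.
def terminate_example(instance_uuid):
    with lockutils.lock(instance_uuid):
        pass  # per-instance critical section, as in do_terminate_instance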
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.766331] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.766331] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-251468e6-45a7-4dcf-a5e5-71614e7909b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.781424] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 964.781424] env[69927]: value = "task-4096184" [ 964.781424] env[69927]: _type = "Task" [ 964.781424] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.781424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c3f462b-c29e-4a9a-ae03-243d2b231ef1 tempest-ServerMetadataTestJSON-1221837469 tempest-ServerMetadataTestJSON-1221837469-project-member] Lock "aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.581s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.794167] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.928707] env[69927]: DEBUG oslo_vmware.api [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096181, 'name': PowerOnVM_Task, 'duration_secs': 0.838523} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.929033] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.929241] env[69927]: DEBUG nova.compute.manager [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 964.930302] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d675bfa-fd6c-44b5-85fa-8adeb5a3a3cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.053710] env[69927]: DEBUG oslo_vmware.api [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096183, 'name': PowerOffVM_Task, 'duration_secs': 0.317109} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.054020] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.054229] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.054499] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c23192c0-8fcf-4203-b7c1-ea6331d9d4ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.136033] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.136353] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.136583] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleting the datastore file [datastore1] 
30d9d1ac-4be0-4723-86b5-0aceda88e67b {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.136978] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26cb6502-e782-4669-a3a8-510834308e3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.145859] env[69927]: DEBUG oslo_vmware.api [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 965.145859] env[69927]: value = "task-4096186" [ 965.145859] env[69927]: _type = "Task" [ 965.145859] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.157756] env[69927]: DEBUG oslo_vmware.api [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.239611] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Applying migration context for instance c3e8a429-8484-4b11-abe3-1cccf0992556 as it has an incoming, in-progress migration 640acec6-f769-4d56-a7aa-84655b85e53f. Migration status is reverting {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 965.242360] env[69927]: INFO nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating resource usage from migration 640acec6-f769-4d56-a7aa-84655b85e53f [ 965.272118] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 77c6ce9e-5e15-41e4-aa81-1ef01248aa32 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.272359] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9348e368-cc3c-4bde-91ae-26fd03ad536a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.272508] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 95c02aa2-d587-4c9f-9b02-2992dfe5b1be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.272633] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 256319c4-817d-4267-8531-a65f0f8cd0b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.272778] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 21b7b237-557e-4030-93bb-6b5ce417e53c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.272940] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ff227e07-8e36-48d6-a8c7-1e0087fd1faa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.273101] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance e0bca101-cf8d-48e1-a331-b0018548593e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.273243] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.273361] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a2b1684f-82af-42fc-925e-db36f31cfe63 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.273504] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cb35090d-bfd2-46df-8ee5-d9b068ba0a28 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.273636] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a9a62523-50fb-44b2-bfc8-9c6664dbf050 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.273794] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c3a531fd-647c-43b6-9d3d-fc6ecbc2445e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.273893] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance e1946033-4ec3-4561-afdf-a3b748f7c611 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.274214] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9363c664-5848-408b-9b03-2dea4ceded90 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.274389] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 50eedb80-d4bc-42c4-9686-6549cbd675b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.274543] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8be7e64c-7bc6-41a0-ada5-0a5057a2af45 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.274695] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 358ecaef-37f0-42be-acce-00f389650c97 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 965.274814] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 30d9d1ac-4be0-4723-86b5-0aceda88e67b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.274929] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Migration 640acec6-f769-4d56-a7aa-84655b85e53f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 965.275071] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c3e8a429-8484-4b11-abe3-1cccf0992556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.275189] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 44e81156-b0c7-4f68-9732-b39f41ebcd4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.275318] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 27e20d58-1150-4b90-b888-d84aff1954ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.275439] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 5581f8af-9796-48ad-a2f3-557e90d9662a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 965.293008] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096184, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.450738] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.658348] env[69927]: DEBUG oslo_vmware.api [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171671} completed successfully. 
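The resource tracker pass above walks every instance and migration holding allocations against this compute node and decides, per consumer, whether the allocation is still owned here, should be healed, or should be skipped. The per-consumer allocations it reasons about can be read back from Placement directly; a small sketch of that lookup, with the endpoint and token as placeholders and one instance UUID taken from the log:

import requests

PLACEMENT = 'http://placement.example.test'               # placeholder endpoint
CONSUMER = '30d9d1ac-4be0-4723-86b5-0aceda88e67b'          # instance UUID from the log

# GET /allocations/{consumer_uuid} returns the consumer's allocations keyed
# by resource provider, e.g. {'allocations': {'<rp_uuid>': {'resources':
# {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}}, ...}.
resp = requests.get(
    f'{PLACEMENT}/allocations/{CONSUMER}',
    headers={'X-Auth-Token': '<token>',                    # placeholder
             'OpenStack-API-Version': 'placement 1.28'})   # illustrative microversion
resp.raise_for_status()
for rp_uuid, alloc in resp.json()['allocations'].items():
    print(rp_uuid, alloc['resources'])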
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.658672] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.658894] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.659169] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.659445] env[69927]: INFO nova.compute.manager [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 965.659728] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.659928] env[69927]: DEBUG nova.compute.manager [-] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 965.660039] env[69927]: DEBUG nova.network.neutron [-] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.779336] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 67e00c40-35b6-4a9f-9505-19b804e78c04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 965.793897] env[69927]: DEBUG oslo_vmware.api [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096184, 'name': PowerOnVM_Task, 'duration_secs': 0.803191} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.794645] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.794917] env[69927]: INFO nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Took 7.87 seconds to spawn the instance on the hypervisor. [ 965.795146] env[69927]: DEBUG nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 965.796313] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4e694a-5d34-4a2e-ab1c-d78c527faca0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.078067] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ddd5e-3c97-452e-55f3-fae637140715/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 966.078564] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b693985f-9f92-4029-9ca0-4bb27b79c54b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.086262] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ddd5e-3c97-452e-55f3-fae637140715/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 966.086262] env[69927]: ERROR oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ddd5e-3c97-452e-55f3-fae637140715/disk-0.vmdk due to incomplete transfer. [ 966.088832] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a6e2d59a-2689-4fc5-9e3b-52293dfbe071 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.095084] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ddd5e-3c97-452e-55f3-fae637140715/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 966.095297] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Uploaded image cb041012-44ce-40ce-ba24-60376d2f1762 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 966.097494] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 966.097770] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f2264bef-e7a8-4cac-8133-f668e1ac3a63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.104581] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 966.104581] env[69927]: value = "task-4096187" [ 966.104581] env[69927]: _type = "Task" [ 966.104581] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.114872] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096187, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.282315] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 01c8eb3b-bf30-4b00-af71-e32f0dc19171 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 966.323740] env[69927]: INFO nova.compute.manager [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Took 61.80 seconds to build instance. [ 966.619196] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096187, 'name': Destroy_Task, 'duration_secs': 0.425618} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.619581] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Destroyed the VM [ 966.619827] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 966.620105] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-15f06038-1e58-412a-add0-79b3038f3446 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.627918] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 966.627918] env[69927]: value = "task-4096188" [ 966.627918] env[69927]: _type = "Task" [ 966.627918] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.639703] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096188, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.711501] env[69927]: DEBUG nova.network.neutron [-] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.787485] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 74ea584f-b20f-425b-acb3-0ec60e7f2a1e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 966.828092] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f31b36d3-b49b-4d9b-9621-34810fb327b8 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.856s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.855658] env[69927]: DEBUG nova.compute.manager [req-33678487-aaa0-44be-982b-9a94303d46fd req-7914fdf6-2124-447f-bf30-7b69f1d8c906 service nova] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Received event network-vif-deleted-0d5e4812-8ef2-4a9f-92e9-29113e1eb77e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 966.875347] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.875601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.875831] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.876050] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.876287] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.878908] env[69927]: INFO nova.compute.manager [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Terminating instance [ 
966.970795] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "27e20d58-1150-4b90-b888-d84aff1954ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.971158] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "27e20d58-1150-4b90-b888-d84aff1954ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.971440] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "27e20d58-1150-4b90-b888-d84aff1954ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.971601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "27e20d58-1150-4b90-b888-d84aff1954ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.971796] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "27e20d58-1150-4b90-b888-d84aff1954ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.974096] env[69927]: INFO nova.compute.manager [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Terminating instance [ 967.141672] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096188, 'name': RemoveSnapshot_Task, 'duration_secs': 0.386467} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.142328] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 967.142740] env[69927]: DEBUG nova.compute.manager [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 967.143684] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e986d262-4746-42dc-a656-332463ba4ad8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.215064] env[69927]: INFO nova.compute.manager [-] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Took 1.55 seconds to deallocate network for instance. [ 967.289597] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance c87680be-227e-4a3e-92d3-c2310623bfe4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 967.331719] env[69927]: DEBUG nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 967.384859] env[69927]: DEBUG nova.compute.manager [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 967.385110] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 967.386095] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390c123f-0f2a-4592-9cbb-60a46f0daf34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.400689] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.400992] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43c4599f-3429-4807-a78d-ec7457978cca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.411020] env[69927]: DEBUG oslo_vmware.api [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 967.411020] env[69927]: value = "task-4096189" [ 967.411020] env[69927]: _type = "Task" [ 967.411020] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.420847] env[69927]: DEBUG oslo_vmware.api [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096189, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.481015] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "refresh_cache-27e20d58-1150-4b90-b888-d84aff1954ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.481266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquired lock "refresh_cache-27e20d58-1150-4b90-b888-d84aff1954ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.481474] env[69927]: DEBUG nova.network.neutron [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.665482] env[69927]: INFO nova.compute.manager [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Shelve offloading [ 967.727229] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.793981] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 967.864370] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.927421] env[69927]: DEBUG oslo_vmware.api [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096189, 'name': PowerOffVM_Task, 'duration_secs': 0.210128} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.927850] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.928929] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 967.928929] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80a5be73-36e8-416b-8305-e7cbb43668f1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.021514] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.021755] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.021944] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Deleting the datastore file [datastore1] 95c02aa2-d587-4c9f-9b02-2992dfe5b1be {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.022233] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ade81a60-a642-4081-91e6-6ea948db86e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.025008] env[69927]: DEBUG nova.network.neutron [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 968.032642] env[69927]: DEBUG oslo_vmware.api [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for the task: (returnval){ [ 968.032642] env[69927]: value = "task-4096191" [ 968.032642] env[69927]: _type = "Task" [ 968.032642] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.041991] env[69927]: DEBUG oslo_vmware.api [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096191, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.132403] env[69927]: DEBUG nova.network.neutron [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.174303] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.174556] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed21db24-0b4b-46ec-8cbb-20d542276d9d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.183805] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 968.183805] env[69927]: value = "task-4096192" [ 968.183805] env[69927]: _type = "Task" [ 968.183805] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.198246] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 968.198467] env[69927]: DEBUG nova.compute.manager [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 968.199368] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a8097c-bdeb-4ba6-bd1d-3846b89baae9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.208844] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.209068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.209214] env[69927]: DEBUG nova.network.neutron [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.301195] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9c00e485-fd59-4571-abd5-80ca5e3bac1b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 968.546475] env[69927]: DEBUG oslo_vmware.api [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Task: {'id': task-4096191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246939} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.547197] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.547505] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 968.547726] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 968.547917] env[69927]: INFO nova.compute.manager [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Took 1.16 seconds to destroy the instance on the hypervisor. [ 968.548184] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 968.549013] env[69927]: DEBUG nova.compute.manager [-] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 968.549149] env[69927]: DEBUG nova.network.neutron [-] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.637411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Releasing lock "refresh_cache-27e20d58-1150-4b90-b888-d84aff1954ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.638986] env[69927]: DEBUG nova.compute.manager [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.639351] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.641176] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa98f54-299a-435a-acf2-3b8a8ed39766 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.651187] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.652069] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9af3fed-80ab-4cd5-97a1-7010ae59843f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.659776] env[69927]: DEBUG oslo_vmware.api [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 968.659776] env[69927]: value = "task-4096193" [ 968.659776] env[69927]: _type = "Task" [ 968.659776] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.668694] env[69927]: DEBUG oslo_vmware.api [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.805810] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 968.995020] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "50eedb80-d4bc-42c4-9686-6549cbd675b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.995360] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.995588] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "50eedb80-d4bc-42c4-9686-6549cbd675b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.995771] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.995946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.999451] env[69927]: INFO nova.compute.manager [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Terminating instance [ 969.175302] env[69927]: DEBUG oslo_vmware.api [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096193, 'name': PowerOffVM_Task, 'duration_secs': 0.172017} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.175302] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.175539] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.175804] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e04f1e8b-8b28-4ba0-9542-5c9e76d541cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.209050] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.209388] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.209648] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Deleting the datastore file [datastore2] 27e20d58-1150-4b90-b888-d84aff1954ef {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.210058] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-535d00d6-9638-45e1-a790-f66a3d435180 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.219498] env[69927]: DEBUG oslo_vmware.api [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for the task: (returnval){ [ 969.219498] env[69927]: value = "task-4096195" [ 969.219498] env[69927]: _type = "Task" [ 969.219498] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.230320] env[69927]: DEBUG oslo_vmware.api [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096195, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.310198] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 4b7934f8-2c97-480b-8af7-f09f6819e2b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 969.372101] env[69927]: DEBUG nova.compute.manager [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 969.509132] env[69927]: DEBUG nova.compute.manager [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 969.511037] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.511037] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a1d409-ee75-4cad-a437-09e6bb71b371 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.519829] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 969.521550] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b00245a8-f193-47bc-909b-607cb76b9bb9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.527845] env[69927]: DEBUG oslo_vmware.api [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 969.527845] env[69927]: value = "task-4096196" [ 969.527845] env[69927]: _type = "Task" [ 969.527845] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.537576] env[69927]: DEBUG oslo_vmware.api [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096196, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.691280] env[69927]: DEBUG nova.network.neutron [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699de5cd-28", "ovs_interfaceid": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.734027] env[69927]: DEBUG oslo_vmware.api [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Task: {'id': task-4096195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189581} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.734713] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.735024] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.735226] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.735520] env[69927]: INFO nova.compute.manager [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Took 1.10 seconds to destroy the instance on the hypervisor. 
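The PowerOffVM_Task / DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver invokes a vSphere task method, gets back a task moref, and wait_for_task() polls it (the repeated "progress is 0%" lines) until it completes or errors. Below is a minimal standalone sketch of that pattern against oslo.vmware's public API; the vCenter host, credentials and VM lookup are placeholders, and Nova's vm_util reaches the same calls through its session wrappers rather than directly.

    # Sketch of the invoke-then-poll task pattern visible in the log above.
    # Host, credentials and the VM lookup are placeholders, not values from this log.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test',            # vCenter host (placeholder)
        'administrator@vsphere.local',     # username (placeholder)
        'secret',                          # password (placeholder)
        3,                                 # api_retry_count
        0.5)                               # seconds between task polls

    # Grab some VM moref for illustration; Nova resolves morefs via vm_util instead.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100, ['name'])
    vm_ref = result.objects[0].obj

    # Invoke the vSphere task method, then block while oslo.vmware polls it --
    # the "Invoking VirtualMachine.PowerOffVM_Task ... progress is 0% ...
    # completed successfully" sequence in the trace.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)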
[ 969.735728] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.736024] env[69927]: DEBUG nova.compute.manager [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.736024] env[69927]: DEBUG nova.network.neutron [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.778188] env[69927]: DEBUG nova.network.neutron [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.816079] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 93d19a66-f00e-4fa8-9eed-32035b020ba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 969.933115] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.040139] env[69927]: DEBUG oslo_vmware.api [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096196, 'name': PowerOffVM_Task, 'duration_secs': 0.427266} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.040479] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.041150] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.041150] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8bc4063-0359-4038-9bf7-482d2057b0df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.126058] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.126506] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.126821] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Deleting the datastore file [datastore2] 50eedb80-d4bc-42c4-9686-6549cbd675b7 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.127852] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4675b44d-877f-4e3d-98d5-ca39fe74078a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.138196] env[69927]: DEBUG oslo_vmware.api [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for the task: (returnval){ [ 970.138196] env[69927]: value = "task-4096198" [ 970.138196] env[69927]: _type = "Task" [ 970.138196] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.150046] env[69927]: DEBUG oslo_vmware.api [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096198, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.196387] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.281326] env[69927]: DEBUG nova.network.neutron [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.324489] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 970.324892] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 970.325075] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 970.470408] env[69927]: DEBUG nova.network.neutron [-] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.559358] env[69927]: DEBUG nova.compute.manager [req-513a10f2-ebe4-4890-b3d6-a9b43b4fbe89 req-096101bb-646b-4e0c-8604-a88252b43056 service nova] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Received event network-vif-deleted-8c604e20-6cfc-4498-a689-d24876c07513 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 970.655029] env[69927]: DEBUG oslo_vmware.api [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Task: {'id': task-4096198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.286777} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.655029] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.655029] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.655029] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.655029] env[69927]: INFO nova.compute.manager [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 970.655029] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 970.655314] env[69927]: DEBUG nova.compute.manager [-] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 970.655314] env[69927]: DEBUG nova.network.neutron [-] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 970.792267] env[69927]: INFO nova.compute.manager [-] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Took 1.06 seconds to deallocate network for instance. [ 970.961630] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 970.963696] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7c354d-7d72-4034-bb83-eb4f008eaa87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.976431] env[69927]: INFO nova.compute.manager [-] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Took 2.43 seconds to deallocate network for instance. 
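The "Waiting for function ... _deallocate_network_with_retries to return" lines come from oslo.service's looping-call machinery, which Nova uses here to retry network deallocation until it succeeds. The exact helper, intervals and retry budget Nova uses are not shown in the trace; the sketch below illustrates the generic FixedIntervalLoopingCall retry pattern from oslo.service with a stand-in deallocate_network() function.

    # Generic oslo.service looping-call retry pattern (illustrative only; Nova's
    # actual helper, intervals and retry budget may differ from this sketch).
    from oslo_service import loopingcall

    def deallocate_network():
        """Stand-in for the real neutron deallocation call."""
        pass

    def deallocate_with_retries(max_attempts=3):
        attempt = {'n': 0}

        def _try_once():
            attempt['n'] += 1
            try:
                deallocate_network()
            except Exception:
                if attempt['n'] >= max_attempts:
                    raise                    # give up; wait() re-raises this
                return                       # let the timer call us again
            # Success: stop the looping call and hand back a return value.
            raise loopingcall.LoopingCallDone(True)

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        # start() schedules _try_once every 2 seconds; wait() blocks until
        # LoopingCallDone is raised ("Waiting for function ... to return").
        return timer.start(interval=2).wait()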
[ 970.976883] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.981207] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cef18cb-6ae2-4b81-8950-2b1b60ac0b6a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.008130] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48604063-e736-44d6-8943-ecee2958c10a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.018054] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7186360d-3f12-4ecf-a615-8af98b90d582 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.054297] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b1f678-307e-4f97-84db-ebb1711d3b27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.064696] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ab0245-7a80-463a-ae06-b8f97fb21174 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.070796] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 971.071048] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 971.071698] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleting the datastore file [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 971.072687] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73fe6b4c-69d9-47b5-a01a-87f1ade3a8ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.084143] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 971.092010] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 971.092010] env[69927]: value = "task-4096200" [ 971.092010] env[69927]: _type = "Task" [ 971.092010] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.103806] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096200, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.299810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.492104] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.607551] env[69927]: DEBUG oslo_vmware.api [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233428} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.607595] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 971.607779] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 971.607948] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 971.613118] env[69927]: ERROR nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [req-ecd47cbd-76dc-42f5-bb0b-d5f8734e208c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ecd47cbd-76dc-42f5-bb0b-d5f8734e208c"}]} [ 971.640222] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 971.651340] env[69927]: INFO nova.scheduler.client.report [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted allocations for instance a2b1684f-82af-42fc-925e-db36f31cfe63 [ 971.664417] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 971.664417] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 971.675707] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 971.702244] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 972.129059] env[69927]: DEBUG nova.network.neutron [-] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.156813] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.254620] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6cb80a-57b6-4f52-a8cb-f6b3333f6e22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.265950] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbeb8b8-e101-4a54-85d5-72b498b067dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.304809] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54072100-fa3c-4b6c-a4e0-bdc7460da389 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.317814] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8f546a-4c29-4c38-9116-1db2a49ada8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.338771] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.543033] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527eeff3-6c0a-fdae-802f-14d51644bb7d/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 972.545444] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0111444-3278-40c7-9856-7c351f845eba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.557154] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527eeff3-6c0a-fdae-802f-14d51644bb7d/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 972.557154] env[69927]: ERROR oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527eeff3-6c0a-fdae-802f-14d51644bb7d/disk-0.vmdk due to incomplete transfer. 
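The 409 "placement.concurrent_update" error and the later "generation 103 to 104" refresh show Placement's optimistic concurrency: every inventory PUT carries the resource provider generation, and a stale generation is rejected so the client must re-read and retry. A rough sketch of that contract using plain requests follows; the endpoint and token are placeholders (the provider UUID is the one from this trace), and Nova's report client does the same thing through keystoneauth with more bookkeeping.

    # Illustrative sketch of Placement's generation-checked inventory update.
    # PLACEMENT and TOKEN are placeholders; RP_UUID is taken from the trace.
    import requests

    PLACEMENT = 'http://placement.example.test'
    TOKEN = 'gAAAA...'                      # keystone token (placeholder)
    RP_UUID = '2f529b36-df5f-4b37-8103-68f74f737726'
    HEADERS = {'X-Auth-Token': TOKEN, 'OpenStack-API-Version': 'placement 1.26'}

    def set_inventory(inventories):
        url = f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories'
        for _ in range(3):                  # bounded retry on generation conflicts
            current = requests.get(url, headers=HEADERS).json()
            body = {
                'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation; re-read and retry, as the report client does above.
        raise RuntimeError('gave up after repeated generation conflicts')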
[ 972.557154] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4da8df44-8514-4a3a-900e-4905164f03ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.570543] env[69927]: DEBUG oslo_vmware.rw_handles [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527eeff3-6c0a-fdae-802f-14d51644bb7d/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 972.570821] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Uploaded image c87f5a71-3793-40fd-ad59-7b537237d25e to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 972.572954] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 972.573201] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e885721e-751c-428f-8c2f-ac970da1be4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.580294] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 972.580294] env[69927]: value = "task-4096201" [ 972.580294] env[69927]: _type = "Task" [ 972.580294] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.590333] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096201, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.635571] env[69927]: INFO nova.compute.manager [-] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Took 1.98 seconds to deallocate network for instance. 
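The recurring "Acquiring lock ... / acquired ... waited / released ... held" triplets throughout this trace (for "compute_resources", per-instance termination locks, the "<uuid>-events" locks, and the refresh_cache locks) are emitted by oslo.concurrency's lockutils as decorated or context-managed critical sections are entered and left. A minimal sketch of that pattern is below; the function names are hypothetical, not Nova's exact code.

    # Minimal oslo.concurrency lock pattern behind the acquire/release log lines.
    # Function names are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Only one thread per process runs this at a time; lockutils logs the
        # 'Acquiring lock "compute_resources" by ...' / 'released' lines.
        pass

    def clear_events_for_instance(instance_uuid):
        # Per-instance lock, like the '<uuid>-events' locks in the trace.
        with lockutils.lock(f'{instance_uuid}-events'):
            pass  # mutate the instance's pending-events bookkeeping here

    update_usage()
    clear_events_for_instance('50eedb80-d4bc-42c4-9686-6549cbd675b7')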
[ 972.697029] env[69927]: DEBUG nova.compute.manager [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-vif-unplugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.697029] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.697029] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.697145] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.697336] env[69927]: DEBUG nova.compute.manager [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] No waiting events found dispatching network-vif-unplugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 972.697526] env[69927]: WARNING nova.compute.manager [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received unexpected event network-vif-unplugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 for instance with vm_state shelved_offloaded and task_state None. [ 972.697687] env[69927]: DEBUG nova.compute.manager [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.699077] env[69927]: DEBUG nova.compute.manager [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing instance network info cache due to event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 972.699077] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] Acquiring lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.699077] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] Acquired lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.699077] env[69927]: DEBUG nova.network.neutron [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 972.878818] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 972.878818] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 103 to 104 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 972.878818] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.096721] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096201, 'name': Destroy_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.143166] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.385440] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 973.385440] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.169s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.385588] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 59.973s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.385767] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.388153] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 58.500s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.389730] env[69927]: INFO nova.compute.claims [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.394183] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.394183] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Cleaning up deleted instances {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 973.429417] env[69927]: INFO nova.scheduler.client.report [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Deleted allocations for instance 
cb35090d-bfd2-46df-8ee5-d9b068ba0a28 [ 973.595509] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096201, 'name': Destroy_Task, 'duration_secs': 0.51878} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.595821] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Destroyed the VM [ 973.595964] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 973.596296] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-853f52d8-f23a-46ac-be7a-13a54393566d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.605108] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 973.605108] env[69927]: value = "task-4096202" [ 973.605108] env[69927]: _type = "Task" [ 973.605108] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.606383] env[69927]: DEBUG nova.network.neutron [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updated VIF entry in instance network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 973.606948] env[69927]: DEBUG nova.network.neutron [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap699de5cd-28", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.620447] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096202, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.911493] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] There are 43 instances to clean {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 973.911783] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 07484a6c-f9d1-405b-9ae4-a1b830f474ed] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 973.938608] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1513ced-21ef-42ab-b781-e80fd60e2200 tempest-ServersListShow2100Test-124448045 tempest-ServersListShow2100Test-124448045-project-member] Lock "cb35090d-bfd2-46df-8ee5-d9b068ba0a28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 64.938s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.115876] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] Releasing lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.116375] env[69927]: DEBUG nova.compute.manager [req-f1999d33-ea44-48b7-8b30-f34ebff39819 req-59f8acce-c9da-4716-8332-6ebd51b8c443 service nova] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Received event network-vif-deleted-fa521cbd-9783-45a7-b712-2e14b9bf5139 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 974.124748] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e 
tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096202, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.417929] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: aba9cc2f-9ea1-44a8-a5da-89bdf7ebeda4] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 974.623685] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096202, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.678346] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.678519] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.682584] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.682920] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.686671] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.921925] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 480a672c-cb48-45e3-86bd-1741957a5124] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 974.996676] env[69927]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63aeb59c-bc0e-4d94-b2a5-003f4ccd1294 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.004478] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755354fe-479a-4c23-94a2-e5f8e34260f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.037526] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa08fb26-58df-471b-9ad2-603f0cc3590b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.045534] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1538f7-4ea3-4204-b10b-612bb6378a25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.060043] env[69927]: DEBUG nova.compute.provider_tree [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.122856] env[69927]: DEBUG oslo_vmware.api [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096202, 'name': RemoveSnapshot_Task, 'duration_secs': 1.30843} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.123026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 975.123190] env[69927]: INFO nova.compute.manager [None req-88c7cd2e-75bd-459f-bbec-ad1b3c82a69e tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Took 16.66 seconds to snapshot the instance on the hypervisor. 
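The inventory payload that recurs throughout this trace for provider 2f529b36-df5f-4b37-8103-68f74f737726 is a per-resource-class dict; rewritten as plain Python below, together with the effective capacity Placement derives from it (capacity = (total - reserved) * allocation_ratio). The numbers are copied from the log lines above; the helper function is illustrative only, not Nova or Placement code.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 17,    'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Schedulable capacity per resource class, as Placement computes it:
    # (total - reserved) scaled by the allocation ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}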
[ 975.425764] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: b67630a4-2c1a-440b-af82-80c908ffa6e9] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 975.563252] env[69927]: DEBUG nova.scheduler.client.report [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 975.930072] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 66ba8086-2dd4-4d02-aac3-1bbb4a404784] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 976.068728] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.069340] env[69927]: DEBUG nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 976.071956] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 57.257s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.073291] env[69927]: INFO nova.compute.claims [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.433616] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 6be47dcb-ce00-4b81-9e69-35acabac046e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 976.580243] env[69927]: DEBUG nova.compute.utils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 976.581899] env[69927]: DEBUG nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 976.581899] env[69927]: DEBUG nova.network.neutron [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 976.631106] env[69927]: DEBUG nova.policy [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd1a4b19b9874a17bde997440649c7e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c5a402c4ef2452b9809e30a2fe91431', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 976.937913] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c7451ca3-f1fc-469b-b9d2-7fe24cb8949e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 977.073106] env[69927]: DEBUG nova.network.neutron [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Successfully created port: 1fbc7a57-ea01-478c-8517-9b5d862bf7cc {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
977.085850] env[69927]: DEBUG nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 977.443571] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8de4160d-2282-4ed3-bdf0-349445a6eab8] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 977.619462] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f3c149-d4f1-47af-aab7-9f319e45e6d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.628964] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6f320b-f686-495f-97e7-2567c237576f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.661970] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735b052f-bf6b-46f6-813f-06d10e610a14 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.670417] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa80ea7-43ec-4974-9146-672dcc7f0635 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.684590] env[69927]: DEBUG nova.compute.provider_tree [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.947469] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 80fc9add-683b-424e-9876-cdcae664e2da] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 978.097183] env[69927]: DEBUG nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 978.130204] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 978.131027] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.131027] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 978.131716] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.131716] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 978.131855] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 978.132247] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 978.132529] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 978.132826] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 
tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 978.133299] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 978.133614] env[69927]: DEBUG nova.virt.hardware [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 978.134944] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4f756c-2658-4491-a8d5-66deb9424ba7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.146810] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b01453-f2ca-4aa0-84c5-86820c77249f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.188816] env[69927]: DEBUG nova.scheduler.client.report [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.450648] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: a36b06ca-77c8-4d2f-8b43-2c160fbac93f] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 978.699221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.699221] env[69927]: DEBUG nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 978.700266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 55.530s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.702384] env[69927]: INFO nova.compute.claims [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.764467] env[69927]: DEBUG nova.compute.manager [req-59ddd54b-d279-4a1e-b23a-4b6bc38c2365 req-f27200ac-16fe-4a5c-bd73-b7e1288204cb service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Received event network-vif-plugged-1fbc7a57-ea01-478c-8517-9b5d862bf7cc {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.764467] env[69927]: DEBUG oslo_concurrency.lockutils [req-59ddd54b-d279-4a1e-b23a-4b6bc38c2365 req-f27200ac-16fe-4a5c-bd73-b7e1288204cb service nova] Acquiring lock "67e00c40-35b6-4a9f-9505-19b804e78c04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.764467] env[69927]: DEBUG oslo_concurrency.lockutils [req-59ddd54b-d279-4a1e-b23a-4b6bc38c2365 req-f27200ac-16fe-4a5c-bd73-b7e1288204cb service nova] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.764467] env[69927]: DEBUG oslo_concurrency.lockutils [req-59ddd54b-d279-4a1e-b23a-4b6bc38c2365 req-f27200ac-16fe-4a5c-bd73-b7e1288204cb service nova] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.764467] env[69927]: DEBUG nova.compute.manager [req-59ddd54b-d279-4a1e-b23a-4b6bc38c2365 req-f27200ac-16fe-4a5c-bd73-b7e1288204cb service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] No waiting events found dispatching network-vif-plugged-1fbc7a57-ea01-478c-8517-9b5d862bf7cc {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 978.764467] env[69927]: WARNING nova.compute.manager [req-59ddd54b-d279-4a1e-b23a-4b6bc38c2365 req-f27200ac-16fe-4a5c-bd73-b7e1288204cb service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Received unexpected event network-vif-plugged-1fbc7a57-ea01-478c-8517-9b5d862bf7cc for instance with vm_state building and task_state spawning. 
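The Acquiring/acquired/released lines for the "<instance-uuid>-events" lock above come from an oslo.concurrency synchronized wrapper around InstanceEvents' _pop_event, which serialises access to the table of events Neutron is expected to send (network-vif-plugged, network-vif-unplugged, ...); when nothing was registered for an incoming event, the caller emits the WARNING about an unexpected notification, as seen here. A rough sketch of that locking pattern follows, with a hypothetical in-memory event table standing in for Nova's real bookkeeping.

from oslo_concurrency import lockutils

# Hypothetical stand-in for the per-instance table of expected events;
# Nova keeps richer state than a plain dict.
_pending_events = {}

def pop_instance_event(instance_uuid, event_name):
    # The synchronized decorator's inner wrapper is what logs the
    # "Acquiring lock ... by '..._pop_event'" / "acquired" / "released"
    # DEBUG lines in the trace above.
    @lockutils.synchronized(instance_uuid + '-events')
    def _pop_event():
        return _pending_events.get(instance_uuid, {}).pop(event_name, None)

    return _pop_event()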
[ 978.869767] env[69927]: DEBUG nova.network.neutron [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Successfully updated port: 1fbc7a57-ea01-478c-8517-9b5d862bf7cc {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 978.954771] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 915797c5-6f68-4355-a6b0-ad2b06b826cb] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 979.210462] env[69927]: DEBUG nova.compute.utils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 979.212330] env[69927]: DEBUG nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 979.212838] env[69927]: DEBUG nova.network.neutron [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 979.291250] env[69927]: DEBUG nova.policy [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73424f30d6e14b179075e20e0df8c52f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d0467960094b28b33ccb3a692e46fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 979.370742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.370910] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.371109] env[69927]: DEBUG nova.network.neutron [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Building network info cache for instance {{(pid=69927) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 979.458704] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 0e6e60e7-d623-44da-912e-804da4d616c9] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 979.720254] env[69927]: DEBUG nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 979.753231] env[69927]: DEBUG nova.network.neutron [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Successfully created port: 09ee92f2-f98d-47b8-81a8-b99cdce409e0 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.917328] env[69927]: DEBUG nova.network.neutron [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 979.964641] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 353ceb53-07e6-4e9b-bed5-ce9fca368b27] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 980.235017] env[69927]: DEBUG nova.network.neutron [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Updating instance_info_cache with network_info: [{"id": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "address": "fa:16:3e:27:46:31", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fbc7a57-ea", "ovs_interfaceid": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.237209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493b3550-c92f-4112-8f2f-9305673979c7 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.248575] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de9987d-2b54-4972-8ad2-05f504667065 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.286248] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c815580-8d48-4bcf-8d5d-64b143bd3008 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.295749] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac3ebf1-4abf-4678-b442-1f1579237f44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.312157] env[69927]: DEBUG nova.compute.provider_tree [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.468199] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: cde9885b-1aa8-411d-847e-087fe375002b] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 980.741861] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.742283] env[69927]: DEBUG nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Instance network_info: |[{"id": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "address": "fa:16:3e:27:46:31", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fbc7a57-ea", "ovs_interfaceid": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 980.743347] env[69927]: DEBUG nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 
tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 980.746062] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:46:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c8a5d7c-ee1f-4a41-94e4-db31e85a398d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fbc7a57-ea01-478c-8517-9b5d862bf7cc', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.753674] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.753912] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.754162] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-048e7dad-ece3-48de-aa32-780534ce87d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.779509] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 980.779793] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.779950] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 980.780148] env[69927]: DEBUG 
nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.780296] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 980.780443] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 980.780649] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 980.780806] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 980.780970] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 980.781139] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 980.781304] env[69927]: DEBUG nova.virt.hardware [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 980.782148] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e37a48-76d1-4203-99bd-fba779ed7935 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.787923] env[69927]: DEBUG nova.compute.manager [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Received event network-changed-1fbc7a57-ea01-478c-8517-9b5d862bf7cc {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.788158] env[69927]: DEBUG nova.compute.manager [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] 
Refreshing instance network info cache due to event network-changed-1fbc7a57-ea01-478c-8517-9b5d862bf7cc. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 980.788326] env[69927]: DEBUG oslo_concurrency.lockutils [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] Acquiring lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.788473] env[69927]: DEBUG oslo_concurrency.lockutils [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] Acquired lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.788624] env[69927]: DEBUG nova.network.neutron [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Refreshing network info cache for port 1fbc7a57-ea01-478c-8517-9b5d862bf7cc {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.791039] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.791039] env[69927]: value = "task-4096203" [ 980.791039] env[69927]: _type = "Task" [ 980.791039] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.799225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde70b77-0878-43eb-b1fd-6a7d91c502e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.807362] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096203, 'name': CreateVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.819729] env[69927]: DEBUG nova.scheduler.client.report [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 980.971614] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c2b6b943-f6d6-427f-aba5-1d619d889325] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 981.303862] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096203, 'name': CreateVM_Task, 'duration_secs': 0.327388} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.304125] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.304822] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.304985] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.306158] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 981.306158] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-050d310a-5f76-4d03-bf94-066bf7ddf6d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.310770] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 981.310770] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a2fde-af15-a189-e391-5998b5b1a551" [ 981.310770] env[69927]: _type = "Task" [ 981.310770] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.319999] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a2fde-af15-a189-e391-5998b5b1a551, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.325316] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.325895] env[69927]: DEBUG nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 981.328662] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 58.084s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.328839] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.330922] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 52.280s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.332324] env[69927]: INFO nova.compute.claims [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.359292] env[69927]: INFO nova.scheduler.client.report [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Deleted allocations for instance 77c6ce9e-5e15-41e4-aa81-1ef01248aa32 [ 981.477389] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c6a06550-33ed-4fee-bd37-3fce9c55b235] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 981.787636] env[69927]: DEBUG nova.network.neutron [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Updated VIF entry in instance network info cache for port 1fbc7a57-ea01-478c-8517-9b5d862bf7cc. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 981.788099] env[69927]: DEBUG nova.network.neutron [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Updating instance_info_cache with network_info: [{"id": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "address": "fa:16:3e:27:46:31", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fbc7a57-ea", "ovs_interfaceid": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.821632] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a2fde-af15-a189-e391-5998b5b1a551, 'name': SearchDatastore_Task, 'duration_secs': 0.011627} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.821964] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.822188] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.822428] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.822576] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.824144] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.824144] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a945079a-e43b-40db-8164-760692535b4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.833593] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.833723] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.834483] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a795355f-f57d-4c7e-bc8a-8c903b809ba3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.839224] env[69927]: DEBUG nova.compute.utils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 981.845552] env[69927]: DEBUG nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 981.845725] env[69927]: DEBUG nova.network.neutron [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 981.848123] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 981.848123] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52669844-c554-5bfd-cc9b-ae56a458b2d9" [ 981.848123] env[69927]: _type = "Task" [ 981.848123] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.858697] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52669844-c554-5bfd-cc9b-ae56a458b2d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009878} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.859505] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5cf90a2-d6f1-4c5a-ab4e-f3bfa2c9239a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.867822] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 981.867822] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5247d60e-385e-6253-6226-efe178a86837" [ 981.867822] env[69927]: _type = "Task" [ 981.867822] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.868281] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3bc77687-439a-4a7b-842c-36f4b7aa726c tempest-ServerRescueTestJSON-1075581336 tempest-ServerRescueTestJSON-1075581336-project-member] Lock "77c6ce9e-5e15-41e4-aa81-1ef01248aa32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 62.326s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.879275] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5247d60e-385e-6253-6226-efe178a86837, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.879275] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.879275] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 67e00c40-35b6-4a9f-9505-19b804e78c04/67e00c40-35b6-4a9f-9505-19b804e78c04.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 981.879275] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73c599b3-0aac-43cb-8364-365c06cf32ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.886240] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 981.886240] env[69927]: value = "task-4096204" [ 981.886240] env[69927]: _type = "Task" [ 981.886240] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.895060] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096204, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.931597] env[69927]: DEBUG nova.policy [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17f0a09d1477425b85bd55fd26f3bddb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3159998f0e574efbb6241904abb29576', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 981.978648] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: bf4bee47-36ce-43ee-96f1-96f262882986] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 981.986597] env[69927]: DEBUG nova.network.neutron [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Successfully updated port: 09ee92f2-f98d-47b8-81a8-b99cdce409e0 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 982.291261] env[69927]: DEBUG oslo_concurrency.lockutils [req-9435a9c4-8792-4743-82aa-20dc6c844f24 req-deb49149-7489-4d74-871d-d79a93041304 service nova] Releasing lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.346449] env[69927]: DEBUG nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 982.400945] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096204, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491308} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.401304] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 67e00c40-35b6-4a9f-9505-19b804e78c04/67e00c40-35b6-4a9f-9505-19b804e78c04.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 982.401533] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 982.401794] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2203ccfb-a0e5-47de-a45b-6c04e64f72e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.409446] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 982.409446] env[69927]: value = "task-4096205" [ 982.409446] env[69927]: _type = "Task" [ 982.409446] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.423850] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096205, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.481714] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 9d83dda3-5fb1-416d-9307-faeef454efec] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 982.489685] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.489940] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquired lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.490197] env[69927]: DEBUG nova.network.neutron [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.893249] env[69927]: DEBUG nova.network.neutron [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Successfully created port: 866c0d6a-a2a2-45f3-a0e9-06356a7cc46d {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.908741] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b37a696-bb6f-4324-b52c-888ae2e5bf89 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.919709] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffaf6e9f-7e38-430d-8ca2-94e437c49b01 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.927977] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096205, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069572} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.928984] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 982.930962] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c4c0f1-721f-4eaf-9c6b-354165b0ba21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.963064] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9159d2b9-c105-4f31-8f1d-21230b0d427c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.985308] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 67e00c40-35b6-4a9f-9505-19b804e78c04/67e00c40-35b6-4a9f-9505-19b804e78c04.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.986221] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: a4249857-6f60-4040-b676-d2d19dc83f15] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 982.988035] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad618ebc-a6a1-4e74-aa1d-43c02f1d4375 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.009884] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba6fcde-58a3-42df-b6a2-8e3f1ba247f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.016042] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 983.016042] env[69927]: value = "task-4096206" [ 983.016042] env[69927]: _type = "Task" [ 983.016042] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.027521] env[69927]: DEBUG nova.compute.provider_tree [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.034850] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096206, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.105740] env[69927]: DEBUG nova.network.neutron [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.287805] env[69927]: DEBUG nova.compute.manager [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Received event network-vif-plugged-09ee92f2-f98d-47b8-81a8-b99cdce409e0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 983.288459] env[69927]: DEBUG oslo_concurrency.lockutils [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] Acquiring lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.291886] env[69927]: DEBUG oslo_concurrency.lockutils [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.291886] env[69927]: DEBUG oslo_concurrency.lockutils [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.291886] env[69927]: DEBUG nova.compute.manager [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] No waiting events found dispatching network-vif-plugged-09ee92f2-f98d-47b8-81a8-b99cdce409e0 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 983.291886] env[69927]: WARNING nova.compute.manager [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Received unexpected event network-vif-plugged-09ee92f2-f98d-47b8-81a8-b99cdce409e0 for instance with vm_state building and task_state spawning. [ 983.291886] env[69927]: DEBUG nova.compute.manager [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Received event network-changed-09ee92f2-f98d-47b8-81a8-b99cdce409e0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 983.291886] env[69927]: DEBUG nova.compute.manager [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Refreshing instance network info cache due to event network-changed-09ee92f2-f98d-47b8-81a8-b99cdce409e0. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 983.291886] env[69927]: DEBUG oslo_concurrency.lockutils [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] Acquiring lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.367814] env[69927]: DEBUG nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 983.400517] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 983.401075] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.401265] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 983.401501] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.401708] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 983.401870] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 983.402098] env[69927]: DEBUG nova.virt.hardware [None 
req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 983.402263] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 983.402441] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 983.402605] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 983.402783] env[69927]: DEBUG nova.virt.hardware [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 983.403966] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23d4036-53ed-46f6-a9bc-ac78f65a088f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.415283] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66e2ed8-9fc9-4dcd-a5a5-90f4b82c7f8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.504452] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: f6972b90-7746-4a37-8be8-1739f96dc3dc] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 983.525651] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096206, 'name': ReconfigVM_Task, 'duration_secs': 0.312571} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.525952] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 67e00c40-35b6-4a9f-9505-19b804e78c04/67e00c40-35b6-4a9f-9505-19b804e78c04.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.526767] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-874040e2-6ba0-4f79-8b37-4cc58d777f23 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.530553] env[69927]: DEBUG nova.scheduler.client.report [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.539111] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 983.539111] env[69927]: value = "task-4096207" [ 983.539111] env[69927]: _type = "Task" [ 983.539111] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.552074] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096207, 'name': Rename_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.553215] env[69927]: DEBUG nova.network.neutron [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Updating instance_info_cache with network_info: [{"id": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "address": "fa:16:3e:ff:4e:70", "network": {"id": "f56f2bef-2712-4a3f-95b7-10160511508e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1137847455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d0467960094b28b33ccb3a692e46fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09ee92f2-f9", "ovs_interfaceid": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.007564] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 14359034-232d-478f-bf65-cf9937c59229] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 984.037872] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.038450] env[69927]: DEBUG nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 984.041129] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 54.880s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.053561] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096207, 'name': Rename_Task, 'duration_secs': 0.148232} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.053969] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.055328] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-837d44b2-8c41-4fe6-b9a9-5b55a625a77d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.057905] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Releasing lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.058431] env[69927]: DEBUG nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Instance network_info: |[{"id": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "address": "fa:16:3e:ff:4e:70", "network": {"id": "f56f2bef-2712-4a3f-95b7-10160511508e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1137847455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d0467960094b28b33ccb3a692e46fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09ee92f2-f9", "ovs_interfaceid": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 984.059514] env[69927]: DEBUG oslo_concurrency.lockutils [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] Acquired lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.059800] env[69927]: DEBUG nova.network.neutron [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Refreshing network info cache for port 09ee92f2-f98d-47b8-81a8-b99cdce409e0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 984.061165] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 
01c8eb3b-bf30-4b00-af71-e32f0dc19171] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:4e:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09ee92f2-f98d-47b8-81a8-b99cdce409e0', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.070949] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Creating folder: Project (76d0467960094b28b33ccb3a692e46fa). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.072553] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a698dd53-1f89-4644-8af9-35c57010c962 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.078917] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 984.078917] env[69927]: value = "task-4096208" [ 984.078917] env[69927]: _type = "Task" [ 984.078917] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.085967] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Created folder: Project (76d0467960094b28b33ccb3a692e46fa) in parent group-v811283. [ 984.086091] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Creating folder: Instances. Parent ref: group-v811483. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.089735] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2eff3ea-d02b-4adf-b297-5463a1a5dad2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.091762] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096208, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.100061] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Created folder: Instances in parent group-v811483. [ 984.100529] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 984.100529] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 984.100726] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19b5521-acb7-4e2c-9e85-49a1db487e95 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.122610] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.122610] env[69927]: value = "task-4096211" [ 984.122610] env[69927]: _type = "Task" [ 984.122610] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.139747] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096211, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.511451] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: cdf0ea6e-d884-49c1-87ec-cd6de1376c7f] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 984.547142] env[69927]: DEBUG nova.objects.instance [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lazy-loading 'migration_context' on Instance uuid c3e8a429-8484-4b11-abe3-1cccf0992556 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.549405] env[69927]: DEBUG nova.compute.utils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 984.551189] env[69927]: DEBUG nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 984.551189] env[69927]: DEBUG nova.network.neutron [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 984.590016] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096208, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.634882] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096211, 'name': CreateVM_Task, 'duration_secs': 0.367997} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.635239] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 984.635773] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.635941] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.636515] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 984.636851] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81dd573d-e16a-4303-8c97-19e102319cad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.643645] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 984.643645] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52da39c3-ad4b-13bf-245f-7ff98f8735ef" [ 984.643645] env[69927]: _type = "Task" [ 984.643645] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.645080] env[69927]: DEBUG nova.policy [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea8c48f887ee4cfbb11f427dd66a6f1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72b796c4097e41cf947cacae04cb3a20', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 984.659617] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52da39c3-ad4b-13bf-245f-7ff98f8735ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.794330] env[69927]: DEBUG nova.network.neutron [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Updated VIF entry in instance network info cache for port 09ee92f2-f98d-47b8-81a8-b99cdce409e0. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 984.794756] env[69927]: DEBUG nova.network.neutron [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Updating instance_info_cache with network_info: [{"id": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "address": "fa:16:3e:ff:4e:70", "network": {"id": "f56f2bef-2712-4a3f-95b7-10160511508e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1137847455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d0467960094b28b33ccb3a692e46fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09ee92f2-f9", "ovs_interfaceid": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.017376] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: e8e80be6-a82f-4cc5-92fd-366badf519b8] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 985.054590] env[69927]: DEBUG nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 985.099303] env[69927]: DEBUG oslo_vmware.api [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096208, 'name': PowerOnVM_Task, 'duration_secs': 0.812466} completed successfully. 
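The instance_info_cache update above stores a list of VIF dictionaries (port id, MAC address, subnets, binding details). Purely as an illustration, a helper that pulls the commonly needed fields out of one such entry, using values taken from the cache entry logged above:

# Illustrative helper for summarizing one network_info VIF entry like the one
# cached above (port id, MAC address, fixed IPs, MTU).
def summarize_vif(vif):
    subnets = vif["network"]["subnets"]
    fixed_ips = [ip["address"] for subnet in subnets for ip in subnet["ips"]]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed_ips,
        "mtu": vif["network"]["meta"].get("mtu"),
        "ovs_interface": vif.get("ovs_interfaceid"),
    }

example = {"id": "09ee92f2-f98d-47b8-81a8-b99cdce409e0",
           "address": "fa:16:3e:ff:4e:70",
           "ovs_interfaceid": "09ee92f2-f98d-47b8-81a8-b99cdce409e0",
           "network": {"meta": {"mtu": 8950},
                       "subnets": [{"ips": [{"address": "192.168.128.3"}]}]}}
print(summarize_vif(example))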
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.099664] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.099664] env[69927]: INFO nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Took 7.00 seconds to spawn the instance on the hypervisor. [ 985.099871] env[69927]: DEBUG nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 985.100663] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a10f21-9b68-4656-87e1-20f2d1a19c33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.158176] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52da39c3-ad4b-13bf-245f-7ff98f8735ef, 'name': SearchDatastore_Task, 'duration_secs': 0.015204} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.160918] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.161191] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.161433] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.161579] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.161758] 
env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.162505] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8f7d135-811e-4148-a418-72eb932f6a53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.172226] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.172394] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 985.173058] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11492e5e-24b2-46c6-96ef-37b9cbfce30b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.184022] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 985.184022] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d77ccb-53b4-a016-d3c4-35cdd754c4e9" [ 985.184022] env[69927]: _type = "Task" [ 985.184022] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.192627] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d77ccb-53b4-a016-d3c4-35cdd754c4e9, 'name': SearchDatastore_Task} progress is 0%. 
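The MakeDirectory call followed by "Folder [datastore1] devstack-image-cache_base created" above reflects a create-if-missing pattern: attempt the mkdir and treat "already exists" as success. A generic sketch of that idempotent create, using the local filesystem as a stand-in for the datastore FileManager call:

# Generic create-if-missing sketch mirroring the MakeDirectory lines above;
# the local filesystem stands in for the vSphere FileManager here.
import os

def ensure_directory(path):
    try:
        os.makedirs(path)
        print(f"Created directory with path {path}")
    except FileExistsError:
        # Another request created it first; that is fine.
        print(f"Directory {path} already exists")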
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.298122] env[69927]: DEBUG oslo_concurrency.lockutils [req-e69a6577-dea6-42f8-8a51-5e821ec2f70e req-9b74ca00-ef76-4790-a6f8-af86191b0c4c service nova] Releasing lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.437091] env[69927]: DEBUG nova.network.neutron [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Successfully created port: a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.495723] env[69927]: DEBUG nova.network.neutron [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Successfully updated port: 866c0d6a-a2a2-45f3-a0e9-06356a7cc46d {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 985.522063] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: a536b069-45e0-4ffe-be53-ac33f8cb6ec0] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 985.536677] env[69927]: DEBUG nova.compute.manager [req-0df115ca-d395-488e-94ab-fefec905eebd req-e6d7a84e-9c9b-4bf1-955f-39da19a54dd9 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Received event network-vif-plugged-866c0d6a-a2a2-45f3-a0e9-06356a7cc46d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 985.537018] env[69927]: DEBUG oslo_concurrency.lockutils [req-0df115ca-d395-488e-94ab-fefec905eebd req-e6d7a84e-9c9b-4bf1-955f-39da19a54dd9 service nova] Acquiring lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.537114] env[69927]: DEBUG oslo_concurrency.lockutils [req-0df115ca-d395-488e-94ab-fefec905eebd req-e6d7a84e-9c9b-4bf1-955f-39da19a54dd9 service nova] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.537276] env[69927]: DEBUG oslo_concurrency.lockutils [req-0df115ca-d395-488e-94ab-fefec905eebd req-e6d7a84e-9c9b-4bf1-955f-39da19a54dd9 service nova] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.537522] env[69927]: DEBUG nova.compute.manager [req-0df115ca-d395-488e-94ab-fefec905eebd req-e6d7a84e-9c9b-4bf1-955f-39da19a54dd9 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] No waiting events found dispatching network-vif-plugged-866c0d6a-a2a2-45f3-a0e9-06356a7cc46d {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 985.538024] env[69927]: WARNING nova.compute.manager [req-0df115ca-d395-488e-94ab-fefec905eebd 
req-e6d7a84e-9c9b-4bf1-955f-39da19a54dd9 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Received unexpected event network-vif-plugged-866c0d6a-a2a2-45f3-a0e9-06356a7cc46d for instance with vm_state building and task_state spawning. [ 985.627235] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eead7c9d-e83e-454e-b3d4-6b727a2826d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.632637] env[69927]: INFO nova.compute.manager [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Took 70.76 seconds to build instance. [ 985.638115] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e955f274-4e1d-4069-9464-85a9685cc8fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.670823] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d013af-da4d-4fd0-9f9b-d2320b09ef5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.682189] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6d3d9c-bfe8-4788-9d23-ce1e982284e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.704416] env[69927]: DEBUG nova.compute.provider_tree [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.709497] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d77ccb-53b4-a016-d3c4-35cdd754c4e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009745} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.710514] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be7d2f84-4f99-4bbd-9fb3-ed5141ea8315 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.717178] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 985.717178] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5295f006-bd2a-5853-6a59-0a3c5fe30563" [ 985.717178] env[69927]: _type = "Task" [ 985.717178] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.726255] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5295f006-bd2a-5853-6a59-0a3c5fe30563, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.002951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "refresh_cache-74ea584f-b20f-425b-acb3-0ec60e7f2a1e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.002951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquired lock "refresh_cache-74ea584f-b20f-425b-acb3-0ec60e7f2a1e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.002951] env[69927]: DEBUG nova.network.neutron [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.025365] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 39421358-2d66-4fbe-a4e0-8fdb0b420c5e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 986.074772] env[69927]: DEBUG nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 986.105218] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 986.105218] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.105455] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 986.105487] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.105607] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 986.105748] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 986.105954] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 986.106126] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 986.106292] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 986.106495] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 986.107428] env[69927]: DEBUG nova.virt.hardware [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 986.107569] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568d4799-1777-40f6-ab9d-5553f18e341f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.117866] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88d000c-4b3c-487e-8118-3cfaf0495b0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.137695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-35c55695-37bb-4974-a94a-2553186d8ca6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.962s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.211121] env[69927]: DEBUG nova.scheduler.client.report [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.231364] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5295f006-bd2a-5853-6a59-0a3c5fe30563, 'name': SearchDatastore_Task, 'duration_secs': 0.01021} completed successfully. 
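The nova.virt.hardware lines above narrow the flavor/image limits (which default to 65536 sockets, cores and threads) down to the single possible topology for one vCPU, VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified enumeration of that search space, not Nova's exact algorithm:

# Simplified sketch of the topology enumeration summarized above: list every
# sockets*cores*threads factorization of the vCPU count within the limits.
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- matches the single topology above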
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.231673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.231971] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 01c8eb3b-bf30-4b00-af71-e32f0dc19171/01c8eb3b-bf30-4b00-af71-e32f0dc19171.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.232273] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58b58895-0df2-4e7d-b438-adb653564536 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.241966] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 986.241966] env[69927]: value = "task-4096212" [ 986.241966] env[69927]: _type = "Task" [ 986.241966] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.255967] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.528325] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 6e698775-2556-4cbe-b65f-0cc3efa7bcf6] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 986.558284] env[69927]: DEBUG nova.network.neutron [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Instance cache missing network info. 
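The CopyVirtualDisk_Task above clones the cached base image into the instance's own datastore folder; source and destination are plain datastore paths derived from the image id and the instance uuid. A small illustration of that path layout, using the ids from the log:

# Illustration of the datastore path layout used by the copy above: the cached
# base VMDK is copied into a per-instance folder named after the instance uuid.
def cache_vmdk_path(image_id, datastore="datastore1"):
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_vmdk_path(instance_uuid, datastore="datastore1"):
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

src = cache_vmdk_path("f524494e-9179-4b3e-a3e2-782f019def24")
dst = instance_vmdk_path("01c8eb3b-bf30-4b00-af71-e32f0dc19171")
print(f"Copying Virtual Disk {src} to {dst}")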
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 986.618420] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Acquiring lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.618420] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Acquired lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.618846] env[69927]: DEBUG nova.network.neutron [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.643234] env[69927]: DEBUG nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 986.755761] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096212, 'name': CopyVirtualDisk_Task} progress is 100%. 
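For context on the inventory reported a few blocks above (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved): under the placement model, usable capacity per resource class is roughly (total - reserved) * allocation_ratio. A tiny illustration of that rule; the formula is an assumption about the placement model, not something stated in this log.

# Rough capacity rule implied by the provider inventory logged above
# (assumption based on the placement model, not taken from this log).
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    print(f"{rc}: schedulable capacity {capacity}")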
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.777642] env[69927]: DEBUG nova.network.neutron [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Updating instance_info_cache with network_info: [{"id": "866c0d6a-a2a2-45f3-a0e9-06356a7cc46d", "address": "fa:16:3e:c3:a0:e0", "network": {"id": "86a3ae49-85ef-464f-a650-b4508e7e3da7", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1348743898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3159998f0e574efbb6241904abb29576", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap866c0d6a-a2", "ovs_interfaceid": "866c0d6a-a2a2-45f3-a0e9-06356a7cc46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.036291] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 5f67d6a0-e4b7-435e-8991-0f54e0379d22] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 987.277364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.277364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.186s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.277364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 56.563s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.277364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.277364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 52.592s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.277364] env[69927]: DEBUG nova.objects.instance [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 987.277364] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516295} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.277364] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 01c8eb3b-bf30-4b00-af71-e32f0dc19171/01c8eb3b-bf30-4b00-af71-e32f0dc19171.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 987.277364] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 987.277364] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-800a91e2-d960-40fd-8f0d-b1194146ae8c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.280620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Releasing lock "refresh_cache-74ea584f-b20f-425b-acb3-0ec60e7f2a1e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.280620] env[69927]: DEBUG nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Instance network_info: |[{"id": "866c0d6a-a2a2-45f3-a0e9-06356a7cc46d", "address": "fa:16:3e:c3:a0:e0", "network": {"id": "86a3ae49-85ef-464f-a650-b4508e7e3da7", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1348743898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3159998f0e574efbb6241904abb29576", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap866c0d6a-a2", "ovs_interfaceid": "866c0d6a-a2a2-45f3-a0e9-06356a7cc46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 987.282240] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:a0:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '866c0d6a-a2a2-45f3-a0e9-06356a7cc46d', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 987.290326] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Creating folder: Project (3159998f0e574efbb6241904abb29576). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 987.290729] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 987.290729] env[69927]: value = "task-4096213" [ 987.290729] env[69927]: _type = "Task" [ 987.290729] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.291689] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5e30569-ca5c-47f5-9c38-d80628c9dbd4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.299658] env[69927]: INFO nova.scheduler.client.report [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Deleted allocations for instance e1946033-4ec3-4561-afdf-a3b748f7c611 [ 987.310750] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Created folder: Project (3159998f0e574efbb6241904abb29576) in parent group-v811283. [ 987.311617] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Creating folder: Instances. Parent ref: group-v811486. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 987.316168] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0394800-2eca-4e56-a95b-cbff4556868a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.318698] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096213, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.328313] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Created folder: Instances in parent group-v811486. [ 987.328640] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 987.328871] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 987.329443] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bbb61cf3-3e91-4aa0-b967-36100162d5e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.353482] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 987.353482] env[69927]: value = "task-4096216" [ 987.353482] env[69927]: _type = "Task" [ 987.353482] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.363428] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096216, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.545020] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: a0b0ebbe-d715-4bc0-a027-5c34c7d3d0c5] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 987.806095] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096213, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071271} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.806414] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.807287] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9b834c-f671-488d-9470-b65541378f7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.816287] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c055b7ac-b1e1-4557-a5d6-971dfc7e4edf tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e1946033-4ec3-4561-afdf-a3b748f7c611" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 60.689s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.847803] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 01c8eb3b-bf30-4b00-af71-e32f0dc19171/01c8eb3b-bf30-4b00-af71-e32f0dc19171.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.848991] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ec09bfd-890d-4b89-a17a-036fe504dffa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.875453] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 987.875453] env[69927]: value = "task-4096217" [ 987.875453] env[69927]: _type = "Task" [ 987.875453] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.879021] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096216, 'name': CreateVM_Task, 'duration_secs': 0.435532} completed successfully. 
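The "Extending root virtual disk to 1048576" / "Extended root virtual disk" pair above corresponds to the m1.nano flavor's root_gb=1 converted to KB (1 * 1024 * 1024 = 1048576), which appears to be the unit the extend task is given. A one-line check:

# The 1048576 figure above is the flavor's root_gb expressed in KB.
def root_gb_to_kb(root_gb):
    return root_gb * 1024 * 1024

print(root_gb_to_kb(1))  # 1048576, matching the m1.nano flavor (root_gb=1)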
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.883391] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.883513] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.883645] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.883984] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 987.884661] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17ca68eb-7826-4c79-b956-7c163b6e2a97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.891157] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096217, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.895849] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 987.895849] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf323c-fafa-6956-8faf-212b0151af6c" [ 987.895849] env[69927]: _type = "Task" [ 987.895849] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.906432] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf323c-fafa-6956-8faf-212b0151af6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.956855] env[69927]: DEBUG nova.network.neutron [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Updating instance_info_cache with network_info: [{"id": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "address": "fa:16:3e:27:46:31", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fbc7a57-ea", "ovs_interfaceid": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.051015] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 128d0705-21a0-4103-ae84-85bbac7e718b] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 988.058429] env[69927]: DEBUG nova.network.neutron [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Successfully updated port: a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.248826] env[69927]: DEBUG oslo_concurrency.lockutils [None req-238496ba-c34d-4b6f-b8de-59bae18230a8 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.250070] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.688s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.252168] env[69927]: INFO nova.compute.claims [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.257349] env[69927]: DEBUG 
nova.compute.manager [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Received event network-changed-866c0d6a-a2a2-45f3-a0e9-06356a7cc46d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.257753] env[69927]: DEBUG nova.compute.manager [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Refreshing instance network info cache due to event network-changed-866c0d6a-a2a2-45f3-a0e9-06356a7cc46d. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 988.257884] env[69927]: DEBUG oslo_concurrency.lockutils [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] Acquiring lock "refresh_cache-74ea584f-b20f-425b-acb3-0ec60e7f2a1e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.257996] env[69927]: DEBUG oslo_concurrency.lockutils [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] Acquired lock "refresh_cache-74ea584f-b20f-425b-acb3-0ec60e7f2a1e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.258182] env[69927]: DEBUG nova.network.neutron [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Refreshing network info cache for port 866c0d6a-a2a2-45f3-a0e9-06356a7cc46d {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.388868] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096217, 'name': ReconfigVM_Task, 'duration_secs': 0.282732} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.389186] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 01c8eb3b-bf30-4b00-af71-e32f0dc19171/01c8eb3b-bf30-4b00-af71-e32f0dc19171.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.389827] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ad1993a-eadc-4fdd-8a92-f83f49374edb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.401019] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 988.401019] env[69927]: value = "task-4096218" [ 988.401019] env[69927]: _type = "Task" [ 988.401019] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.412732] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cf323c-fafa-6956-8faf-212b0151af6c, 'name': SearchDatastore_Task, 'duration_secs': 0.015211} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.412732] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.412732] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.412732] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.412732] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.412732] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.413159] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30aceaed-bcc3-4ef9-8257-587e2f94d5ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.419026] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096218, 'name': Rename_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.429183] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.429415] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.430268] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e2b6cea-0ff1-422d-b784-7eeefcdb265d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.437087] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 988.437087] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526161b1-aa95-a3fb-dd76-7b9c6a113207" [ 988.437087] env[69927]: _type = "Task" [ 988.437087] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.447119] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526161b1-aa95-a3fb-dd76-7b9c6a113207, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.462937] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Releasing lock "refresh_cache-67e00c40-35b6-4a9f-9505-19b804e78c04" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.463322] env[69927]: DEBUG nova.compute.manager [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Inject network info {{(pid=69927) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 988.463578] env[69927]: DEBUG nova.compute.manager [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] network_info to inject: |[{"id": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "address": "fa:16:3e:27:46:31", "network": {"id": "41648ae7-ad4c-489e-b90f-65446dc0b1d1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1522598229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c5a402c4ef2452b9809e30a2fe91431", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c8a5d7c-ee1f-4a41-94e4-db31e85a398d", "external-id": "cl2-zone-613", "segmentation_id": 613, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fbc7a57-ea", "ovs_interfaceid": "1fbc7a57-ea01-478c-8517-9b5d862bf7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 988.468568] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Reconfiguring VM instance to set the machine id {{(pid=69927) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 988.469296] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceffb06c-836a-4452-9df6-a2eed58b3add {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.485425] env[69927]: DEBUG oslo_vmware.api [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Waiting for the task: (returnval){ [ 988.485425] env[69927]: value = "task-4096219" [ 988.485425] env[69927]: _type = "Task" [ 988.485425] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.494690] env[69927]: DEBUG oslo_vmware.api [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Task: {'id': task-4096219, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.560389] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8442f144-2be4-4634-b151-62f049a975b6] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 988.562964] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "refresh_cache-c87680be-227e-4a3e-92d3-c2310623bfe4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.562964] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired lock "refresh_cache-c87680be-227e-4a3e-92d3-c2310623bfe4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.562964] env[69927]: DEBUG nova.network.neutron [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.804293] env[69927]: INFO nova.compute.manager [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Swapping old allocation on dict_keys(['2f529b36-df5f-4b37-8103-68f74f737726']) held by migration 640acec6-f769-4d56-a7aa-84655b85e53f for instance [ 988.835718] env[69927]: DEBUG nova.scheduler.client.report [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Overwriting current allocation {'allocations': {'2f529b36-df5f-4b37-8103-68f74f737726': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 107}}, 'project_id': 'be278be46f3d48df818c834df17c663f', 'user_id': 'b66d74a85f3d4d31a4efce8a8df01cc0', 'consumer_generation': 1} on consumer c3e8a429-8484-4b11-abe3-1cccf0992556 {{(pid=69927) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 988.920220] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096218, 'name': Rename_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.948471] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526161b1-aa95-a3fb-dd76-7b9c6a113207, 'name': SearchDatastore_Task, 'duration_secs': 0.010297} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.949322] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dadb64e-fa39-422c-babf-826da6f14083 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.955180] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 988.955180] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528700a8-08a7-6e57-3ef2-cd929b89a028" [ 988.955180] env[69927]: _type = "Task" [ 988.955180] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.964248] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528700a8-08a7-6e57-3ef2-cd929b89a028, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.997958] env[69927]: DEBUG oslo_vmware.api [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Task: {'id': task-4096219, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.015199] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.015199] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquired lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.015379] env[69927]: DEBUG nova.network.neutron [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 989.065145] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 6b2f2fa5-f303-4d79-ad8f-4a6aa073ea85] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 989.117507] env[69927]: DEBUG nova.network.neutron [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.215034] env[69927]: DEBUG nova.network.neutron [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Updated VIF entry in instance network info cache for port 866c0d6a-a2a2-45f3-a0e9-06356a7cc46d. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 989.215290] env[69927]: DEBUG nova.network.neutron [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Updating instance_info_cache with network_info: [{"id": "866c0d6a-a2a2-45f3-a0e9-06356a7cc46d", "address": "fa:16:3e:c3:a0:e0", "network": {"id": "86a3ae49-85ef-464f-a650-b4508e7e3da7", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1348743898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3159998f0e574efbb6241904abb29576", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap866c0d6a-a2", "ovs_interfaceid": "866c0d6a-a2a2-45f3-a0e9-06356a7cc46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.413252] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096218, 'name': Rename_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.443281] env[69927]: DEBUG nova.network.neutron [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Updating instance_info_cache with network_info: [{"id": "a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c", "address": "fa:16:3e:f9:6b:7b", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4ba62a2-72", "ovs_interfaceid": "a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.479432] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528700a8-08a7-6e57-3ef2-cd929b89a028, 'name': SearchDatastore_Task, 'duration_secs': 0.010166} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.479538] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.479925] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 74ea584f-b20f-425b-acb3-0ec60e7f2a1e/74ea584f-b20f-425b-acb3-0ec60e7f2a1e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.480056] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-228eb65f-9953-4e16-a5e7-4e1d920b07c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.493513] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 989.493513] env[69927]: value = "task-4096220" [ 989.493513] env[69927]: _type = "Task" [ 989.493513] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.506705] env[69927]: DEBUG oslo_vmware.api [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Task: {'id': task-4096219, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.512899] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096220, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.517391] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.518202] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.571878] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: ee422a46-c6e4-4098-8f74-b9f0779d0fba] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 989.724273] env[69927]: DEBUG oslo_concurrency.lockutils [req-566d9492-2e18-4f54-a54d-515ce763417d req-27013f61-fb70-47d8-bbbf-0ccfad3b4e65 service nova] Releasing lock "refresh_cache-74ea584f-b20f-425b-acb3-0ec60e7f2a1e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.835730] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6046c6f8-3208-49a6-af37-3023874dac72 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.844196] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c473d1-389c-4ced-8ccc-b12864123de1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.879887] env[69927]: DEBUG nova.network.neutron [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance_info_cache with network_info: [{"id": "32049b49-d761-48ff-8938-d76ebe86f62e", "address": "fa:16:3e:64:c8:10", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32049b49-d7", "ovs_interfaceid": "32049b49-d761-48ff-8938-d76ebe86f62e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.882694] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b301cab-f78d-40d3-bd53-5b45d0127b36 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.892696] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455f3819-269a-48d2-a2af-82942ab6ca0a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.914456] env[69927]: DEBUG nova.compute.provider_tree [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.928448] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096218, 'name': Rename_Task, 'duration_secs': 1.1587} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.928888] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.929230] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1616325c-1f85-4a79-ba41-a9068811781b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.942149] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 989.942149] env[69927]: value = "task-4096221" [ 989.942149] env[69927]: _type = "Task" [ 989.942149] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.947516] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Releasing lock "refresh_cache-c87680be-227e-4a3e-92d3-c2310623bfe4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.948008] env[69927]: DEBUG nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Instance network_info: |[{"id": "a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c", "address": "fa:16:3e:f9:6b:7b", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4ba62a2-72", "ovs_interfaceid": "a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 989.949218] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:6b:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.958021] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.958895] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.959231] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7087554a-4a85-4d2f-b8ad-8257350ff4bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.981757] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096221, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.985967] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.985967] env[69927]: value = "task-4096222" [ 989.985967] env[69927]: _type = "Task" [ 989.985967] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.005435] env[69927]: DEBUG oslo_vmware.api [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] Task: {'id': task-4096219, 'name': ReconfigVM_Task, 'duration_secs': 1.224477} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.005850] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096222, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.006716] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4a595cc7-d52f-4551-92ac-13bfdf7b0d1a tempest-ServersAdminTestJSON-1660947661 tempest-ServersAdminTestJSON-1660947661-project-admin] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Reconfigured VM instance to set the machine id {{(pid=69927) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 990.013231] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096220, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.083353] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8edafb98-331a-45b8-8de8-4ba04b035ffd] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 990.387128] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Releasing lock "refresh_cache-c3e8a429-8484-4b11-abe3-1cccf0992556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.387801] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.388223] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-530ba168-44d9-47d1-bdcd-b6d5fbd4cb6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.396849] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 990.396849] env[69927]: value = "task-4096223" [ 990.396849] env[69927]: _type = "Task" [ 990.396849] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.406453] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096223, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.420716] env[69927]: DEBUG nova.scheduler.client.report [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.438536] env[69927]: DEBUG nova.compute.manager [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Received event network-vif-plugged-a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.438820] env[69927]: DEBUG oslo_concurrency.lockutils [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] Acquiring lock "c87680be-227e-4a3e-92d3-c2310623bfe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.439703] env[69927]: DEBUG oslo_concurrency.lockutils [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] Lock "c87680be-227e-4a3e-92d3-c2310623bfe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.439703] env[69927]: DEBUG oslo_concurrency.lockutils [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] Lock "c87680be-227e-4a3e-92d3-c2310623bfe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.439703] env[69927]: DEBUG nova.compute.manager [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] No waiting events found dispatching network-vif-plugged-a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 990.439703] env[69927]: WARNING nova.compute.manager [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Received unexpected event network-vif-plugged-a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c for instance with vm_state building and task_state spawning. 
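Aside (not part of the log): the inventory payload reported above for provider 2f529b36-df5f-4b37-8103-68f74f737726 is what the resource tracker hands to Placement. As a rough illustration of how such a record is read, the sketch below (illustrative code, not Nova or Placement source; the helper name is made up) computes the schedulable capacity Placement derives per resource class as (total - reserved) * allocation_ratio, which for the values in the log works out to 192 VCPU, 196078 MB of RAM and 400 GB of disk.

```python
# Illustrative only -- computes the capacity Placement derives from an
# inventory record: (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    """Return the schedulable capacity for each resource class."""
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```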
[ 990.439919] env[69927]: DEBUG nova.compute.manager [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Received event network-changed-a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.441030] env[69927]: DEBUG nova.compute.manager [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Refreshing instance network info cache due to event network-changed-a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 990.441030] env[69927]: DEBUG oslo_concurrency.lockutils [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] Acquiring lock "refresh_cache-c87680be-227e-4a3e-92d3-c2310623bfe4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.441030] env[69927]: DEBUG oslo_concurrency.lockutils [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] Acquired lock "refresh_cache-c87680be-227e-4a3e-92d3-c2310623bfe4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.441030] env[69927]: DEBUG nova.network.neutron [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Refreshing network info cache for port a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.454091] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096221, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.496737] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096222, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.506544] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.776755} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.506761] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 74ea584f-b20f-425b-acb3-0ec60e7f2a1e/74ea584f-b20f-425b-acb3-0ec60e7f2a1e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.507011] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.507281] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9aa3aa3e-ea8e-46d8-9cba-4eab9bd0e5ce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.515302] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 990.515302] env[69927]: value = "task-4096224" [ 990.515302] env[69927]: _type = "Task" [ 990.515302] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.526171] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096224, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.586062] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: b1bcbcfb-2320-434c-901f-0f6a476a3069] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 990.906947] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096223, 'name': PowerOffVM_Task, 'duration_secs': 0.256687} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.907255] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.907930] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:38:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6803b7fa-1500-4d6d-8f68-b7ab4453032d',id=32,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1704385790',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 990.908156] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.908322] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 990.908496] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.908643] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 990.908790] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 990.908994] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 990.909165] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 
tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 990.909339] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 990.909496] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 990.909668] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 990.914670] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bcd8db2-5a94-4c57-b435-c2de020c1e17 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.925616] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.676s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.926105] env[69927]: DEBUG nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 990.930858] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.408s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.931087] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.933430] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.276s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.935028] env[69927]: INFO nova.compute.claims [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.943415] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 990.943415] env[69927]: value = "task-4096225" [ 990.943415] env[69927]: _type = "Task" [ 990.943415] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.957500] env[69927]: DEBUG oslo_vmware.api [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096221, 'name': PowerOnVM_Task, 'duration_secs': 0.753406} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.960765] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.960984] env[69927]: INFO nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Took 10.22 seconds to spawn the instance on the hypervisor. 
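Aside (not part of the log): the PowerOnVM_Task / Rename_Task entries above follow oslo.vmware's invoke-then-poll pattern: a vSphere call returns a Task managed object immediately, and wait_for_task() then polls it, producing the "_poll_task ... progress is N%" lines until the task succeeds or raises. A minimal sketch of that pattern, assuming an already established oslo_vmware.api.VMwareAPISession (`session`) and a VirtualMachine managed-object reference (`vm_ref`) obtained elsewhere:

```python
# Sketch of the invoke-then-poll pattern behind the task-409621x entries.
# `session` is assumed to be an established oslo_vmware.api.VMwareAPISession
# and `vm_ref` a VirtualMachine managed-object reference (both assumptions).
def power_on(session, vm_ref):
    # PowerOnVM_Task returns a Task managed-object reference right away;
    # the actual power-on runs asynchronously on the vCenter side.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task (the "_poll_task ... progress is N%"
    # lines in this log) until it reaches 'success'; on 'error' it raises
    # an oslo_vmware exception instead of returning.
    return session.wait_for_task(task)
```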
[ 990.961208] env[69927]: DEBUG nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 990.961585] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096225, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.962245] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd584e4-f0a1-416a-98fb-d7d22cc3a4dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.977171] env[69927]: INFO nova.scheduler.client.report [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Deleted allocations for instance 9363c664-5848-408b-9b03-2dea4ceded90 [ 990.996767] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096222, 'name': CreateVM_Task, 'duration_secs': 0.572529} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.996939] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.997668] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.997829] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.998165] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.998417] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ed3a798-f417-405d-b17c-c41d950ac79e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.004345] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 991.004345] env[69927]: value = 
"session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52df1b7f-7b1b-a6d7-40a2-809f8fc54edc" [ 991.004345] env[69927]: _type = "Task" [ 991.004345] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.016885] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52df1b7f-7b1b-a6d7-40a2-809f8fc54edc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.025280] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078944} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.025280] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.025528] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1731bef-0280-4b6a-9ff8-e3d81ce3a88b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.047825] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 74ea584f-b20f-425b-acb3-0ec60e7f2a1e/74ea584f-b20f-425b-acb3-0ec60e7f2a1e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.048975] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b91216a-1ad0-496e-ab48-d8dfe984dbdc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.068733] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 991.068733] env[69927]: value = "task-4096226" [ 991.068733] env[69927]: _type = "Task" [ 991.068733] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.080298] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096226, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.089446] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: ab8a8acc-cab7-4a82-bd90-b34147f17b0e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 991.311970] env[69927]: DEBUG nova.network.neutron [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Updated VIF entry in instance network info cache for port a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 991.312566] env[69927]: DEBUG nova.network.neutron [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Updating instance_info_cache with network_info: [{"id": "a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c", "address": "fa:16:3e:f9:6b:7b", "network": {"id": "c0201332-3c11-48a5-acab-7fb9d1e7c65f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cf0555225f1046838a534888181ecd96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4ba62a2-72", "ovs_interfaceid": "a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.440269] env[69927]: DEBUG nova.compute.utils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 991.444695] env[69927]: DEBUG nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 991.444888] env[69927]: DEBUG nova.network.neutron [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 991.461019] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096225, 'name': ReconfigVM_Task, 'duration_secs': 0.352812} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.461623] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f294258e-5712-4c0e-b020-8752252585a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.494030] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:38:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6803b7fa-1500-4d6d-8f68-b7ab4453032d',id=32,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1704385790',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.494272] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.494432] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.494607] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.496017] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.496017] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.496017] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.496017] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.496017] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.496017] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.496463] env[69927]: DEBUG nova.virt.hardware [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.497515] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f3692880-fde4-4871-b5e8-e5c8c0ffd0f3 tempest-ServersListShow298Test-1520130868 tempest-ServersListShow298Test-1520130868-project-member] Lock "9363c664-5848-408b-9b03-2dea4ceded90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.227s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.500361] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51859c14-d79e-4c98-8ce6-74b31d01ce15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.503708] env[69927]: INFO nova.compute.manager [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Took 72.71 seconds to build instance. [ 991.513328] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 991.513328] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5249803d-1d3e-be00-35d2-4ed26f152a5e" [ 991.513328] env[69927]: _type = "Task" [ 991.513328] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.522718] env[69927]: DEBUG nova.policy [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5fbc5d9cbe54152952e259cde2e22a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71f04d90c8ca48209037157448596060', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 991.525427] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52df1b7f-7b1b-a6d7-40a2-809f8fc54edc, 'name': SearchDatastore_Task, 'duration_secs': 0.017906} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.526292] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.526648] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.526913] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.527424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.527692] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.532047] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-923ffdaa-af78-452b-a9e6-4673a623fdfe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.533993] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5249803d-1d3e-be00-35d2-4ed26f152a5e, 'name': SearchDatastore_Task, 'duration_secs': 0.016248} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.540525] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 991.541520] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb16dfc7-6757-46d8-8667-0367b42d4711 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.558896] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.559149] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.560289] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ff3b399-2b31-4288-a2e9-fd8cf162772e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.565522] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 991.565522] env[69927]: value = "task-4096227" [ 991.565522] env[69927]: _type = "Task" [ 991.565522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.569396] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 991.569396] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c4e74-fd3e-2349-40f3-fb9dbac2d3d1" [ 991.569396] env[69927]: _type = "Task" [ 991.569396] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.588893] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096227, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.589450] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096226, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.592517] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 4bf59fae-8029-421b-95fd-a0d008891ce7] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 991.594323] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c4e74-fd3e-2349-40f3-fb9dbac2d3d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.815450] env[69927]: DEBUG oslo_concurrency.lockutils [req-40b40741-937c-45de-a826-17fdb3729202 req-bfc191a7-895b-4e46-a613-c6c430319a45 service nova] Releasing lock "refresh_cache-c87680be-227e-4a3e-92d3-c2310623bfe4" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.945503] env[69927]: DEBUG nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 992.006675] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f1c128f8-472c-4fad-8e79-26405da21a25 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.969s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.085758] env[69927]: DEBUG nova.network.neutron [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Successfully created port: 3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 992.087638] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096227, 'name': ReconfigVM_Task, 'duration_secs': 0.361244} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.091671] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 992.091671] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d076ef38-24ed-4d90-95c8-1ee4cd7754d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.101913] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 7ce79e41-333a-4ef3-ba68-f74067d4ac5a] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 992.103824] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526c4e74-fd3e-2349-40f3-fb9dbac2d3d1, 'name': SearchDatastore_Task, 'duration_secs': 0.024382} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.104057] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096226, 'name': ReconfigVM_Task, 'duration_secs': 0.807825} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.106864] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 74ea584f-b20f-425b-acb3-0ec60e7f2a1e/74ea584f-b20f-425b-acb3-0ec60e7f2a1e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.106864] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e0699a9-d426-4640-b07c-21e2880a5b74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.108979] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b2f39c4-7926-4496-88f9-a45d24cec7e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.133141] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.138424] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc383a0b-45dc-48c6-8310-350cf7e5a5d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.155809] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 992.155809] env[69927]: value = "task-4096228" [ 992.155809] env[69927]: _type = "Task" [ 992.155809] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.156127] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 992.156127] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5211fda3-04ae-a966-ecc5-2645e804cc41" [ 992.156127] env[69927]: _type = "Task" [ 992.156127] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.165329] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 992.165329] env[69927]: value = "task-4096229" [ 992.165329] env[69927]: _type = "Task" [ 992.165329] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.180337] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096228, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.180632] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5211fda3-04ae-a966-ecc5-2645e804cc41, 'name': SearchDatastore_Task, 'duration_secs': 0.025267} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.184406] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.184701] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c87680be-227e-4a3e-92d3-c2310623bfe4/c87680be-227e-4a3e-92d3-c2310623bfe4.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.185194] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23c4b34c-3c1b-4089-a18a-47b6a7ff04b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.191165] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.199683] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 992.199683] env[69927]: value = "task-4096230" [ 992.199683] env[69927]: _type = "Task" [ 992.199683] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.210392] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096230, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.509547] env[69927]: DEBUG nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 992.533128] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f70ae14-5d40-4499-bdd5-21f41301c5b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.545163] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a5da0f-860c-4a23-99c7-7dae0c387b53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.586126] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031f7cce-9473-44c0-8126-bf0a6398f0f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.597217] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec081452-0572-4c95-b8ab-6202aa0a41fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.605920] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c45d2259-2a05-49d5-81eb-4c79ced83121] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 992.617449] env[69927]: DEBUG nova.compute.provider_tree [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.670533] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096228, 'name': Rename_Task, 'duration_secs': 0.173583} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.674362] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.675111] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d92a9ecd-5f07-4bb6-b85e-c3b8aaf4ee95 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.685458] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 992.685458] env[69927]: value = "task-4096231" [ 992.685458] env[69927]: _type = "Task" [ 992.685458] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.688956] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096229, 'name': ReconfigVM_Task, 'duration_secs': 0.356868} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.692687] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Reconfigured VM instance instance-0000002f to attach disk [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556/c3e8a429-8484-4b11-abe3-1cccf0992556.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.693711] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34306b8-f125-4707-b937-fed59fb3132a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.707100] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096231, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.727528] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcf2058-e0da-45bb-be21-c9d7128ffab9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.734351] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532105} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.735251] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] c87680be-227e-4a3e-92d3-c2310623bfe4/c87680be-227e-4a3e-92d3-c2310623bfe4.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.735440] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.735624] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03ea0c0c-2ce9-43b2-aa4b-1091bb4f641a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.752332] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abd9fde-9ffb-4612-bfdd-40a66504d5b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.756604] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 992.756604] env[69927]: value = "task-4096232" [ 992.756604] env[69927]: _type = "Task" [ 992.756604] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.782838] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17918e2d-7cc6-447d-9786-cce35dcbcadb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.789474] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096232, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.794922] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.795241] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7046489-3991-4a3f-90a3-0b7aed325283 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.803372] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 992.803372] env[69927]: value = "task-4096233" [ 992.803372] env[69927]: _type = "Task" [ 992.803372] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.812706] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.812967] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.813229] env[69927]: INFO nova.compute.manager [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Rebooting instance [ 992.814592] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096233, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.962533] env[69927]: DEBUG nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 993.002691] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 993.002996] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.003184] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 993.003408] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 993.003544] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 993.003708] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 993.003923] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 993.004098] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 993.004271] env[69927]: DEBUG nova.virt.hardware [None 
req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 993.004436] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 993.004610] env[69927]: DEBUG nova.virt.hardware [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 993.005549] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b049e9-a211-4b61-901b-47779fcf4fc3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.015032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe8ec12-321d-4a6c-be14-aa2395b408eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.040691] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.120198] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 2cdfda66-1d93-4960-a129-2788f10fa593] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 993.124368] env[69927]: DEBUG nova.scheduler.client.report [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.199847] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096231, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.271037] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096232, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06794} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.271560] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.272878] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0568b6-8ca0-42f9-8cf5-47eff14851e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.303121] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] c87680be-227e-4a3e-92d3-c2310623bfe4/c87680be-227e-4a3e-92d3-c2310623bfe4.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.303121] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd0c5e5e-72f4-469b-96e1-2e7b4c02d167 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.335498] env[69927]: DEBUG oslo_vmware.api [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096233, 'name': PowerOnVM_Task, 'duration_secs': 0.483614} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.335854] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.339766] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 993.339766] env[69927]: value = "task-4096234" [ 993.339766] env[69927]: _type = "Task" [ 993.339766] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.341333] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.341333] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquired lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.341511] env[69927]: DEBUG nova.network.neutron [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.353770] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.628315] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c3c36508-96e1-4e75-931b-c7f0740b74e1] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 993.630869] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.632081] env[69927]: DEBUG nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 993.635026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.142s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.635026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.637762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.997s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.637762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.639943] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.705s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.642068] env[69927]: INFO nova.compute.claims [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.676855] env[69927]: INFO nova.scheduler.client.report [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted allocations for instance 358ecaef-37f0-42be-acce-00f389650c97 [ 993.677212] env[69927]: INFO nova.scheduler.client.report [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Deleted allocations for instance 8be7e64c-7bc6-41a0-ada5-0a5057a2af45 [ 993.700320] env[69927]: DEBUG oslo_vmware.api [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096231, 'name': PowerOnVM_Task, 'duration_secs': 0.683162} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.700624] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.700877] env[69927]: INFO nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Took 10.33 seconds to spawn the instance on the hypervisor. [ 993.701093] env[69927]: DEBUG nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 993.701907] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cbe0ad-5211-4a79-af3a-26f412e4ca59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.862688] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.870275] env[69927]: INFO nova.compute.manager [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Rebuilding instance [ 993.923218] env[69927]: DEBUG nova.compute.manager [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 993.924394] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c9c035-21a2-4e64-9c42-5210d6f7b29f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.135590] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 053f6f00-a818-473b-a887-4ec45174c1d5] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 994.152030] env[69927]: DEBUG nova.compute.utils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 994.157122] env[69927]: DEBUG nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 994.157310] env[69927]: DEBUG nova.network.neutron [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.194652] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f38a2ee-14c2-421f-9de7-574ea5ed011c tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "8be7e64c-7bc6-41a0-ada5-0a5057a2af45" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 50.279s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.195874] env[69927]: DEBUG oslo_concurrency.lockutils [None req-890720cb-6e34-4e00-919d-c6586d904c05 tempest-MultipleCreateTestJSON-1096518746 tempest-MultipleCreateTestJSON-1096518746-project-member] Lock "358ecaef-37f0-42be-acce-00f389650c97" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 50.186s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.207604] env[69927]: DEBUG nova.policy [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd89d0a2232b4da1a0b88799062fe8da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3afde63c8cbe4aecb32a470fd6b948f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 994.223425] env[69927]: INFO nova.compute.manager [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Took 71.07 seconds to build instance. [ 994.354318] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096234, 'name': ReconfigVM_Task, 'duration_secs': 0.986728} completed successfully.
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.354741] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Reconfigured VM instance instance-00000045 to attach disk [datastore1] c87680be-227e-4a3e-92d3-c2310623bfe4/c87680be-227e-4a3e-92d3-c2310623bfe4.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.357254] env[69927]: INFO nova.compute.manager [None req-cd1ddc27-fc5e-4c07-b3bd-772364f18571 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance to original state: 'active' [ 994.359942] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74dd95e5-afcc-4991-8a60-a7685f14f025 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.367468] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 994.367468] env[69927]: value = "task-4096235" [ 994.367468] env[69927]: _type = "Task" [ 994.367468] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.371788] env[69927]: DEBUG nova.network.neutron [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Updating instance_info_cache with network_info: [{"id": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "address": "fa:16:3e:ff:4e:70", "network": {"id": "f56f2bef-2712-4a3f-95b7-10160511508e", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1137847455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d0467960094b28b33ccb3a692e46fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09ee92f2-f9", "ovs_interfaceid": "09ee92f2-f98d-47b8-81a8-b99cdce409e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.380897] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096235, 'name': Rename_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.508553] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "e0bca101-cf8d-48e1-a331-b0018548593e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.508822] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e0bca101-cf8d-48e1-a331-b0018548593e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.509035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "e0bca101-cf8d-48e1-a331-b0018548593e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.509217] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e0bca101-cf8d-48e1-a331-b0018548593e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.509383] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e0bca101-cf8d-48e1-a331-b0018548593e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.512027] env[69927]: INFO nova.compute.manager [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Terminating instance [ 994.514588] env[69927]: DEBUG nova.network.neutron [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Successfully created port: 89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 994.526099] env[69927]: DEBUG nova.network.neutron [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Successfully updated port: 3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 994.526737] env[69927]: WARNING oslo_messaging._drivers.amqpdriver [None
req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 994.580751] env[69927]: DEBUG nova.compute.manager [req-750b93e6-6a82-4184-a4a9-1a52e3afacc1 req-65b3f45b-b07d-47a3-b26a-c180dc242969 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Received event network-vif-plugged-3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 994.581038] env[69927]: DEBUG oslo_concurrency.lockutils [req-750b93e6-6a82-4184-a4a9-1a52e3afacc1 req-65b3f45b-b07d-47a3-b26a-c180dc242969 service nova] Acquiring lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.581301] env[69927]: DEBUG oslo_concurrency.lockutils [req-750b93e6-6a82-4184-a4a9-1a52e3afacc1 req-65b3f45b-b07d-47a3-b26a-c180dc242969 service nova] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.581530] env[69927]: DEBUG oslo_concurrency.lockutils [req-750b93e6-6a82-4184-a4a9-1a52e3afacc1 req-65b3f45b-b07d-47a3-b26a-c180dc242969 service nova] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.581757] env[69927]: DEBUG nova.compute.manager [req-750b93e6-6a82-4184-a4a9-1a52e3afacc1 req-65b3f45b-b07d-47a3-b26a-c180dc242969 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] No waiting events found dispatching network-vif-plugged-3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 994.581957] env[69927]: WARNING nova.compute.manager [req-750b93e6-6a82-4184-a4a9-1a52e3afacc1 req-65b3f45b-b07d-47a3-b26a-c180dc242969 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Received unexpected event network-vif-plugged-3edeb122-1a25-4dcd-93fc-1dcf798a6da1 for instance with vm_state building and task_state spawning. [ 994.640469] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 820c50b9-3c18-41bc-a000-22425b1dbb27] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 994.671290] env[69927]: DEBUG nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Start building block device mappings for instance.
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 994.725879] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8bd4778-1d14-4663-9565-e9d26ddafbf3 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 84.101s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.876157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Releasing lock "refresh_cache-01c8eb3b-bf30-4b00-af71-e32f0dc19171" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.881592] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096235, 'name': Rename_Task, 'duration_secs': 0.392122} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.885146] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.885593] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ed7e751-44e5-46a9-9a6c-38e9b22ac042 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.895628] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 994.895628] env[69927]: value = "task-4096236" [ 994.895628] env[69927]: _type = "Task" [ 994.895628] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.914070] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096236, 'name': PowerOnVM_Task} progress is 33%.
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.942871] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.942871] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ab4d350-203d-4226-849c-d65b2fa83f68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.950777] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 994.950777] env[69927]: value = "task-4096237" [ 994.950777] env[69927]: _type = "Task" [ 994.950777] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.964764] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.025021] env[69927]: DEBUG nova.compute.manager [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 995.032273] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.032273] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a83eb0-1d34-4f47-926e-7e2e9e2e2f25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.035742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.035953] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquired lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.036156] env[69927]: DEBUG nova.network.neutron [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.045077] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.045461] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0995773a-88aa-425b-8fee-e575779df2dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.057560] env[69927]: DEBUG oslo_vmware.api [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 995.057560] env[69927]: value = "task-4096238" [ 995.057560] env[69927]: _type = "Task" [ 995.057560] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.069508] env[69927]: DEBUG oslo_vmware.api [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096238, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.144279] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 4ad26720-ed24-4963-9519-3345dbfeb9a2] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 995.229532] env[69927]: DEBUG nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 995.238543] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c30ff1-86fa-42a9-92aa-62d41dc878f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.256375] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da10143-e09f-414f-bdda-c88ff0703a86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.306698] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a05567e-7e0f-4ddc-a61d-c11cd162c207 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.315170] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cde554-5a02-4c3b-b8c1-d7da63ab4156 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.331646] env[69927]: DEBUG nova.compute.provider_tree [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.380878] env[69927]: DEBUG nova.compute.manager [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 995.382180] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4428c8c-df55-4e4a-81ac-023bafb64ae8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.410589] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096236, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.467170] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096237, 'name': PowerOffVM_Task, 'duration_secs': 0.235343} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.467740] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.468109] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.469010] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa1a5c5-1ccc-42e5-a768-9674d1154640 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.478674] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.479058] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79621047-f52c-42f0-ae09-4e9dbe5f0f4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.564662] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.564957] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.565162] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleting the datastore file [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.565836] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c92ce1f-0b54-4705-b9bc-bdc40cc5cd79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.571210] env[69927]: DEBUG oslo_vmware.api [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096238, 'name': PowerOffVM_Task, 'duration_secs': 0.219476} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.572043] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.572328] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.572581] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9750e680-8490-4905-a539-a24688c1f6d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.576661] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 995.576661] env[69927]: value = "task-4096240" [ 995.576661] env[69927]: _type = "Task" [ 995.576661] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.585755] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.587210] env[69927]: DEBUG nova.network.neutron [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 995.648310] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 995.648506] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Cleaning up deleted instances with incomplete migration {{(pid=69927) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 995.654609] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.654609] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.654855] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Deleting the datastore file [datastore1] e0bca101-cf8d-48e1-a331-b0018548593e {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.655398] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-faba032d-fd81-4d6f-ae44-3074d5c740c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.663635] env[69927]: DEBUG oslo_vmware.api [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for the task: (returnval){ [ 995.663635] env[69927]: value = "task-4096242" [ 995.663635] env[69927]: _type = "Task" [ 995.663635] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.677996] env[69927]: DEBUG oslo_vmware.api [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096242, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.684650] env[69927]: DEBUG nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 995.718164] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.718457] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.718623] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.718888] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.719030] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.719194] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.719448] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.719841] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 995.719841] env[69927]: DEBUG nova.virt.hardware [None 
req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.719970] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.720174] env[69927]: DEBUG nova.virt.hardware [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.721079] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d0cd94-2b73-4d35-8e42-fe8b35bff7df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.730955] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b85abd-f7b3-4bbc-a1f0-b453f02e769e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.770034] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.801437] env[69927]: DEBUG nova.network.neutron [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Updating instance_info_cache with network_info: [{"id": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "address": "fa:16:3e:b9:9c:54", "network": {"id": "efde2a11-36e6-487a-aeb4-7807070b2fe5", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-389765351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71f04d90c8ca48209037157448596060", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5514c5a3-1294-40ad-ae96-29d5c24a3d95", "external-id": "nsx-vlan-transportzone-179", "segmentation_id": 179, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edeb122-1a", "ovs_interfaceid": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.810269] env[69927]: DEBUG oslo_concurrency.lockutils 
[None req-f4e2c4d9-d735-4ee3-a0cb-69d1fee69d35 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "interface-74ea584f-b20f-425b-acb3-0ec60e7f2a1e-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.810673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f4e2c4d9-d735-4ee3-a0cb-69d1fee69d35 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "interface-74ea584f-b20f-425b-acb3-0ec60e7f2a1e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.811222] env[69927]: DEBUG nova.objects.instance [None req-f4e2c4d9-d735-4ee3-a0cb-69d1fee69d35 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lazy-loading 'flavor' on Instance uuid 74ea584f-b20f-425b-acb3-0ec60e7f2a1e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.837453] env[69927]: DEBUG nova.scheduler.client.report [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.911046] env[69927]: DEBUG oslo_vmware.api [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096236, 'name': PowerOnVM_Task, 'duration_secs': 0.604841} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.911406] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.911616] env[69927]: INFO nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Took 9.84 seconds to spawn the instance on the hypervisor.
[ 995.911802] env[69927]: DEBUG nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 995.912641] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158ea272-e3c5-4d3c-92d2-b829d1cd473f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.093905] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198404} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.095053] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.095337] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.095627] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.130204] env[69927]: DEBUG nova.network.neutron [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Successfully updated port: 89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.156618] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 996.182615] env[69927]: DEBUG oslo_vmware.api [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Task: {'id': task-4096242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165953} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.183189] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.183371] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.183547] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.183897] env[69927]: INFO nova.compute.manager [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 996.183962] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.184151] env[69927]: DEBUG nova.compute.manager [-] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 996.184235] env[69927]: DEBUG nova.network.neutron [-] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.308025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Releasing lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.308025] env[69927]: DEBUG nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Instance network_info: |[{"id": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "address": "fa:16:3e:b9:9c:54", "network": {"id": "efde2a11-36e6-487a-aeb4-7807070b2fe5", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-389765351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71f04d90c8ca48209037157448596060", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5514c5a3-1294-40ad-ae96-29d5c24a3d95", "external-id": "nsx-vlan-transportzone-179", "segmentation_id": 179, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edeb122-1a", "ovs_interfaceid": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 996.308025] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:9c:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5514c5a3-1294-40ad-ae96-29d5c24a3d95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3edeb122-1a25-4dcd-93fc-1dcf798a6da1', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 996.314042] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Creating folder: Project (71f04d90c8ca48209037157448596060). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 996.315080] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81ecf54c-0438-43a5-bbb0-27687c63dce4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.319900] env[69927]: DEBUG nova.objects.instance [None req-f4e2c4d9-d735-4ee3-a0cb-69d1fee69d35 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lazy-loading 'pci_requests' on Instance uuid 74ea584f-b20f-425b-acb3-0ec60e7f2a1e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.333872] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Created folder: Project (71f04d90c8ca48209037157448596060) in parent group-v811283. [ 996.334408] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Creating folder: Instances. Parent ref: group-v811490. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 996.334841] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-029339ac-4eb0-4185-947f-6e2ed83f13d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.346021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.346021] env[69927]: DEBUG nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 996.351395] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.918s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.351795] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.354387] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.270s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.356739] env[69927]: INFO nova.compute.claims [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.361196] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Created folder: Instances in parent group-v811490. [ 996.361632] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.365016] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 996.365016] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5508525-36dd-496b-bf1e-c852d18d9c60 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.388942] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 996.388942] env[69927]: value = "task-4096245" [ 996.388942] env[69927]: _type = "Task" [ 996.388942] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.395491] env[69927]: INFO nova.scheduler.client.report [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted allocations for instance ff227e07-8e36-48d6-a8c7-1e0087fd1faa [ 996.406887] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096245, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.410017] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f418d9-9c38-47da-9292-07846231250d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.419984] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Doing hard reboot of VM {{(pid=69927) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 996.420714] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-faad1a47-6dde-446e-905c-4dffd3aabd91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.430560] env[69927]: DEBUG oslo_vmware.api [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 996.430560] env[69927]: value = "task-4096246" [ 996.430560] env[69927]: _type = "Task" [ 996.430560] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.438495] env[69927]: INFO nova.compute.manager [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Took 67.41 seconds to build instance. [ 996.447666] env[69927]: DEBUG oslo_vmware.api [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096246, 'name': ResetVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.635295] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.635481] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.635673] env[69927]: DEBUG nova.network.neutron [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 996.823132] env[69927]: DEBUG nova.objects.base [None req-f4e2c4d9-d735-4ee3-a0cb-69d1fee69d35 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Object Instance<74ea584f-b20f-425b-acb3-0ec60e7f2a1e> lazy-loaded attributes: flavor,pci_requests {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 996.823360] env[69927]: DEBUG nova.network.neutron [None req-f4e2c4d9-d735-4ee3-a0cb-69d1fee69d35 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 996.850348] env[69927]: DEBUG nova.compute.utils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 996.851682] env[69927]: DEBUG nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 996.851854] env[69927]: DEBUG nova.network.neutron [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 996.909477] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096245, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.909477] env[69927]: DEBUG nova.policy [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '181ec10b2e4b4f1794294d18313a5918', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a47794e5824701925ad4bdc3651196', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 996.914930] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d138d717-00ab-493e-92a2-1a6c3c034f24 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.853s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.916239] env[69927]: DEBUG oslo_concurrency.lockutils [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] Acquired lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.917622] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3751e665-f4da-410d-bed9-07a7c31a68be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.933515] env[69927]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 996.933770] env[69927]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=69927) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 996.945820] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0954958-e867-446e-807b-8fed7bdcdb68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.950934] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08a09734-0a4c-4960-9c4f-48617c847873 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c87680be-227e-4a3e-92d3-c2310623bfe4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.605s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.960138] env[69927]: DEBUG oslo_vmware.api [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096246, 'name': ResetVM_Task, 'duration_secs': 0.111037} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.962015] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Did hard reboot of VM {{(pid=69927) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 996.962892] env[69927]: DEBUG nova.compute.manager [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 996.964543] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b793fc96-f70c-462e-87ed-428e0ce86823 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.972352] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc879d6e-ec6a-4f37-b835-1a3ad1fa7060 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.992392] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f4e2c4d9-d735-4ee3-a0cb-69d1fee69d35 tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "interface-74ea584f-b20f-425b-acb3-0ec60e7f2a1e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.182s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.994906] env[69927]: DEBUG nova.network.neutron [-] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.029970] env[69927]: ERROR root [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-811413' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = 
get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-811413' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-811413' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-811413'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-811413' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-811413' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-811413'}\n"]: nova.exception.InstanceNotFound: Instance ff227e07-8e36-48d6-a8c7-1e0087fd1faa could not be found. [ 997.030292] env[69927]: DEBUG oslo_concurrency.lockutils [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] Releasing lock "ff227e07-8e36-48d6-a8c7-1e0087fd1faa" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.030566] env[69927]: DEBUG nova.compute.manager [req-33e5bbcb-c582-47ce-86cc-2f982707e060 req-7b19b47d-80ca-4058-a92e-2023f3b8b434 service nova] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Detach interface failed, port_id=7c2fe55b-b50d-414d-bc2e-984a899ad2e4, reason: Instance ff227e07-8e36-48d6-a8c7-1e0087fd1faa could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 997.149592] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 997.149875] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.150043] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 997.150231] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.150423] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 997.150588] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 997.150803] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 997.150963] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 997.151151] env[69927]: DEBUG nova.virt.hardware [None 
req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 997.151318] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 997.151492] env[69927]: DEBUG nova.virt.hardware [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 997.152431] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205202fc-06de-4629-bade-f74ffc78824f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.162216] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90664148-4d78-4b0f-9a38-edbfb642aaf4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.178395] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:23:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c8a5d7c-ee1f-4a41-94e4-db31e85a398d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85825835-357a-42a3-81f4-b55d7e165b65', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.186065] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.187024] env[69927]: DEBUG nova.network.neutron [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 997.189279] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 997.189530] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6a43b26-3cbf-4842-85d7-fc0e9cfbed72 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.213444] env[69927]: DEBUG nova.compute.manager [req-db64ca49-b768-4e92-9b07-ecd0899dbad8 req-1fdd4544-e122-426d-b3ff-23f641c30c24 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Received event network-vif-plugged-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.213839] env[69927]: DEBUG oslo_concurrency.lockutils [req-db64ca49-b768-4e92-9b07-ecd0899dbad8 req-1fdd4544-e122-426d-b3ff-23f641c30c24 service nova] Acquiring lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.213897] env[69927]: DEBUG oslo_concurrency.lockutils [req-db64ca49-b768-4e92-9b07-ecd0899dbad8 req-1fdd4544-e122-426d-b3ff-23f641c30c24 service nova] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.214080] env[69927]: DEBUG oslo_concurrency.lockutils [req-db64ca49-b768-4e92-9b07-ecd0899dbad8 req-1fdd4544-e122-426d-b3ff-23f641c30c24 service nova] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.214266] env[69927]: DEBUG nova.compute.manager [req-db64ca49-b768-4e92-9b07-ecd0899dbad8 req-1fdd4544-e122-426d-b3ff-23f641c30c24 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] No waiting events found dispatching network-vif-plugged-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 997.214447] env[69927]: WARNING nova.compute.manager [req-db64ca49-b768-4e92-9b07-ecd0899dbad8 req-1fdd4544-e122-426d-b3ff-23f641c30c24 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Received unexpected event network-vif-plugged-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 for instance with vm_state building and task_state spawning. [ 997.216544] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.216544] env[69927]: value = "task-4096247" [ 997.216544] env[69927]: _type = "Task" [ 997.216544] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.227205] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096247, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.356637] env[69927]: DEBUG nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 997.404299] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096245, 'name': CreateVM_Task, 'duration_secs': 0.978289} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.404520] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.405361] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.406539] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.406539] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 997.406539] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477b377d-a8b1-46f3-bbc0-68628828a678 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.410447] env[69927]: DEBUG nova.network.neutron [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Updating instance_info_cache with network_info: [{"id": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "address": "fa:16:3e:c4:9b:91", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89f92ae9-a8", "ovs_interfaceid": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.418208] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 997.418208] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529bf6a8-23e0-d62b-c76a-ad5d40aaf0f5" [ 997.418208] env[69927]: _type = "Task" [ 997.418208] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.434145] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529bf6a8-23e0-d62b-c76a-ad5d40aaf0f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.483626] env[69927]: DEBUG nova.network.neutron [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Successfully created port: f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 997.504335] env[69927]: INFO nova.compute.manager [-] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Took 1.32 seconds to deallocate network for instance. [ 997.513224] env[69927]: DEBUG oslo_concurrency.lockutils [None req-67221774-56b9-4724-af96-93dfb4e5ed02 tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.700s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.726784] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096247, 'name': CreateVM_Task, 'duration_secs': 0.392593} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.729236] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.730278] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.844643] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c840b834-9157-4fbf-afde-c725698e329c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.854544] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b710e7c3-97eb-4a37-90b0-a4407440b0d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.893026] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acae79be-ceda-420d-9af1-38db09f7a9b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.899403] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb7b1a7-e248-4796-b2ad-175f766a4327 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.916748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.916748] env[69927]: DEBUG nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Instance network_info: |[{"id": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "address": "fa:16:3e:c4:9b:91", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89f92ae9-a8", "ovs_interfaceid": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 997.916982] env[69927]: DEBUG nova.compute.provider_tree [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.919499] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:9b:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89f92ae9-a8f4-402a-b248-cc3ad9bf67d7', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.928289] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.930401] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 997.935175] env[69927]: DEBUG nova.compute.manager [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Received event network-changed-3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.935377] env[69927]: DEBUG nova.compute.manager [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Refreshing instance network info cache due to event network-changed-3edeb122-1a25-4dcd-93fc-1dcf798a6da1. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 997.935601] env[69927]: DEBUG oslo_concurrency.lockutils [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] Acquiring lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.935743] env[69927]: DEBUG oslo_concurrency.lockutils [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] Acquired lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.935904] env[69927]: DEBUG nova.network.neutron [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Refreshing network info cache for port 3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 997.937794] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8de76a9-e7e2-46f7-b27a-8ad0ac404a78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.969031] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529bf6a8-23e0-d62b-c76a-ad5d40aaf0f5, 'name': SearchDatastore_Task, 'duration_secs': 0.014586} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.969031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.969031] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.969271] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.969382] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.969602] env[69927]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.969915] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.969915] env[69927]: value = "task-4096248" [ 997.969915] env[69927]: _type = "Task" [ 997.969915] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.970252] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.970710] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 997.971355] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06953459-c181-4726-9495-58a601af0912 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.974640] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6a02322-b10f-4b4a-bf80-1b217bb93000 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.983858] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 997.983858] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5257d5b7-822f-a8fd-d76d-c34f49c5217e" [ 997.983858] env[69927]: _type = "Task" [ 997.983858] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.991411] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.991638] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.992366] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096248, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.993044] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-611ac4e4-5437-439c-9e87-08a6892a0ecc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.999443] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5257d5b7-822f-a8fd-d76d-c34f49c5217e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.003935] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 998.003935] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525905a5-02e7-2c3e-65f8-a441673775c0" [ 998.003935] env[69927]: _type = "Task" [ 998.003935] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.014953] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.018984] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525905a5-02e7-2c3e-65f8-a441673775c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.366283] env[69927]: DEBUG nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 998.408965] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 998.409233] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.409391] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.409584] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.409797] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.409913] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 998.410140] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 998.410303] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 998.410474] env[69927]: DEBUG 
nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 998.410639] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 998.410847] env[69927]: DEBUG nova.virt.hardware [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 998.412209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8235a44c-a06e-4739-bfaa-7517d6edfdbe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.421619] env[69927]: DEBUG nova.scheduler.client.report [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.429033] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3beb3599-3711-4741-9290-65739dab7aa9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.486400] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096248, 'name': CreateVM_Task, 'duration_secs': 0.419821} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.489697] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 998.490786] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.497687] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5257d5b7-822f-a8fd-d76d-c34f49c5217e, 'name': SearchDatastore_Task, 'duration_secs': 0.025704} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.497900] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.498167] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.498379] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.498620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.498942] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 998.499231] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-889bbae7-4fc8-4623-a35c-5c47d0425fa9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.505491] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 998.505491] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52185636-60a1-cb5a-9bc5-899b52367aa0" [ 998.505491] env[69927]: _type = "Task" [ 998.505491] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.520724] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525905a5-02e7-2c3e-65f8-a441673775c0, 'name': SearchDatastore_Task, 'duration_secs': 0.012926} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.524719] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52185636-60a1-cb5a-9bc5-899b52367aa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.527366] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c163afe-fffe-4157-9d1d-4fc91116c105 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.534053] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 998.534053] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523844aa-0c19-1dfc-d524-0819a811ef54" [ 998.534053] env[69927]: _type = "Task" [ 998.534053] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.545366] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523844aa-0c19-1dfc-d524-0819a811ef54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.653551] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c602503-498d-4895-8891-5ad7a0bcf4f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.662868] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Suspending the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 998.663136] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-81b1bec0-fbcd-40cb-b00f-3149ddce58b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.670349] env[69927]: DEBUG oslo_vmware.api [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] Waiting for the task: (returnval){ [ 998.670349] env[69927]: value = "task-4096249" [ 998.670349] env[69927]: _type = "Task" [ 998.670349] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.679445] env[69927]: DEBUG oslo_vmware.api [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] Task: {'id': task-4096249, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.806325] env[69927]: DEBUG nova.network.neutron [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Updated VIF entry in instance network info cache for port 3edeb122-1a25-4dcd-93fc-1dcf798a6da1. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 998.806765] env[69927]: DEBUG nova.network.neutron [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Updating instance_info_cache with network_info: [{"id": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "address": "fa:16:3e:b9:9c:54", "network": {"id": "efde2a11-36e6-487a-aeb4-7807070b2fe5", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-389765351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71f04d90c8ca48209037157448596060", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5514c5a3-1294-40ad-ae96-29d5c24a3d95", "external-id": "nsx-vlan-transportzone-179", "segmentation_id": 179, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edeb122-1a", "ovs_interfaceid": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.932168] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.932647] env[69927]: DEBUG nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 998.939270] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.460s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.939270] env[69927]: INFO nova.compute.claims [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.021604] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52185636-60a1-cb5a-9bc5-899b52367aa0, 'name': SearchDatastore_Task, 'duration_secs': 0.020021} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.021604] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.021761] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.022040] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.046829] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523844aa-0c19-1dfc-d524-0819a811ef54, 'name': SearchDatastore_Task, 'duration_secs': 0.016774} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.046885] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.047185] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73/7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 999.047499] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.047689] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.048035] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d62dd03d-1ae7-465e-8f0e-0cdaf0761f28 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.051421] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a03fdc62-bf02-4ba2-aeab-88c92c890ad5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.063882] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 999.063882] env[69927]: value = "task-4096250" [ 999.063882] env[69927]: _type = "Task" [ 999.063882] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.070561] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.070939] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 999.072535] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c64a05cb-32f3-40b0-b330-c6159912533a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.081812] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.090889] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 999.090889] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e0585-7dcc-af5c-a274-724ef743b1b7" [ 999.090889] env[69927]: _type = "Task" [ 999.090889] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.105128] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e0585-7dcc-af5c-a274-724ef743b1b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.186325] env[69927]: DEBUG oslo_vmware.api [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] Task: {'id': task-4096249, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.186835] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "c3e8a429-8484-4b11-abe3-1cccf0992556" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.187185] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.187573] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.187809] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.188087] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.191843] env[69927]: INFO nova.compute.manager [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Terminating instance [ 999.311203] env[69927]: DEBUG oslo_concurrency.lockutils [req-c9f9fad5-a4f5-48e0-a561-19c3f5aea699 req-4e3ea7a7-74d4-426d-918a-cf6ad96ad3d0 service nova] Releasing lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.437951] env[69927]: DEBUG nova.compute.utils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 999.439620] env[69927]: DEBUG nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Allocating IP information in the 
background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 999.439712] env[69927]: DEBUG nova.network.neutron [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.480100] env[69927]: DEBUG nova.network.neutron [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Successfully updated port: f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 999.531141] env[69927]: DEBUG nova.policy [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16c1e562693c466c8786016a777f9f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cf6bb3492c642aa9a168e484299289c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 999.574922] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096250, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.593230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.593230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.593230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.593230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.593406] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.604234] env[69927]: INFO nova.compute.manager [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Terminating instance [ 999.616745] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e0585-7dcc-af5c-a274-724ef743b1b7, 'name': SearchDatastore_Task, 'duration_secs': 0.024234} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.618598] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10d349c7-a7a4-45f9-a3f2-4cb04fc847e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.630566] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 999.630566] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b20dbc-a043-0e9b-e4fe-a4db34750e44" [ 999.630566] env[69927]: _type = "Task" [ 999.630566] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.650223] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b20dbc-a043-0e9b-e4fe-a4db34750e44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.684861] env[69927]: DEBUG oslo_vmware.api [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] Task: {'id': task-4096249, 'name': SuspendVM_Task} progress is 62%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.697074] env[69927]: DEBUG nova.compute.manager [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 999.697280] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.699635] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e4c987-bff3-420c-87ca-e6348c0d40ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.708736] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.710062] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf9d22e0-5856-41e8-a9dc-626cf9a312a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.720385] env[69927]: DEBUG oslo_vmware.api [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 999.720385] env[69927]: value = "task-4096251" [ 999.720385] env[69927]: _type = "Task" [ 999.720385] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.739173] env[69927]: DEBUG oslo_vmware.api [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096251, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.861968] env[69927]: DEBUG nova.compute.manager [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Received event network-changed-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.861968] env[69927]: DEBUG nova.compute.manager [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Refreshing instance network info cache due to event network-changed-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 999.861968] env[69927]: DEBUG oslo_concurrency.lockutils [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] Acquiring lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.862217] env[69927]: DEBUG oslo_concurrency.lockutils [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] Acquired lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.862477] env[69927]: DEBUG nova.network.neutron [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Refreshing network info cache for port 89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.945147] env[69927]: DEBUG nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 999.984210] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.984500] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.984768] env[69927]: DEBUG nova.network.neutron [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1000.081026] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.976993} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.081026] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73/7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.081026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.081026] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c90c741-25c1-4e88-a15e-2130e241e6b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.093972] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 1000.093972] env[69927]: value = "task-4096252" [ 1000.093972] env[69927]: _type = "Task" [ 1000.093972] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.102772] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096252, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.115031] env[69927]: DEBUG nova.compute.manager [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.115031] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.115031] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae61178-775c-4862-a5c1-daa3e6f6d072 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.133729] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.138176] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df4f4567-8926-4904-a5d1-7531b15f6b4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.154265] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b20dbc-a043-0e9b-e4fe-a4db34750e44, 'name': SearchDatastore_Task, 'duration_secs': 0.088283} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.154385] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.154843] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.155390] env[69927]: DEBUG oslo_vmware.api [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 1000.155390] env[69927]: value = "task-4096253" [ 1000.155390] env[69927]: _type = "Task" [ 1000.155390] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.155848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.156193] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.156566] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed9b6cf5-63a6-41d1-b64c-3346e3d1d9ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.159648] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a403da9-d052-4ad6-981e-4d9114f31bf1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.186308] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1000.186308] env[69927]: value = "task-4096254" [ 1000.186308] env[69927]: _type = "Task" [ 1000.186308] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.186608] env[69927]: DEBUG oslo_vmware.api [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096253, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.186843] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.187020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.191282] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-038b7f63-8e3d-41c9-add2-a71d52d05e2d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.202182] env[69927]: DEBUG oslo_vmware.api [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] Task: {'id': task-4096249, 'name': SuspendVM_Task, 'duration_secs': 1.139408} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.202834] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Suspended the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1000.206184] env[69927]: DEBUG nova.compute.manager [None req-9c0f4edc-3624-49cd-9108-48b10976ad16 tempest-ServersAdminNegativeTestJSON-2102141716 tempest-ServersAdminNegativeTestJSON-2102141716-project-admin] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1000.206184] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b56cb7e-670a-43f7-9df2-a9f52afd053b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.212354] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096254, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.212731] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1000.212731] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a3e209-f4fc-bd18-e642-b054a34707f2" [ 1000.212731] env[69927]: _type = "Task" [ 1000.212731] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.234393] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.235344] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.235344] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.235344] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.235550] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.237353] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a3e209-f4fc-bd18-e642-b054a34707f2, 'name': SearchDatastore_Task, 'duration_secs': 0.023568} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.237967] env[69927]: INFO nova.compute.manager [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Terminating instance [ 1000.244995] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e87abb6-d224-4c1d-8231-db35e79250b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.252811] env[69927]: DEBUG oslo_vmware.api [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096251, 'name': PowerOffVM_Task, 'duration_secs': 0.469374} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.255727] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.255727] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.255917] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60855a97-29f1-494d-851c-f35cbbfd935e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.260995] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1000.260995] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5216668c-96d8-e691-8a96-026606ac01f1" [ 1000.260995] env[69927]: _type = "Task" [ 1000.260995] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.267660] env[69927]: DEBUG nova.network.neutron [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Successfully created port: af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.279701] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5216668c-96d8-e691-8a96-026606ac01f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.370826] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.371263] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.371493] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Deleting the datastore file [datastore2] c3e8a429-8484-4b11-abe3-1cccf0992556 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.372253] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5fc38b7-4227-46b2-a97e-d5cd5a27c85f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.380281] env[69927]: DEBUG oslo_vmware.api [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 1000.380281] env[69927]: value = "task-4096256" [ 1000.380281] env[69927]: _type = "Task" [ 1000.380281] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.391492] env[69927]: DEBUG oslo_vmware.api [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096256, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.426667] env[69927]: DEBUG nova.compute.manager [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Received event network-vif-plugged-f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.426899] env[69927]: DEBUG oslo_concurrency.lockutils [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] Acquiring lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.427131] env[69927]: DEBUG oslo_concurrency.lockutils [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.427285] env[69927]: DEBUG oslo_concurrency.lockutils [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.427517] env[69927]: DEBUG nova.compute.manager [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] No waiting events found dispatching network-vif-plugged-f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1000.427649] env[69927]: WARNING nova.compute.manager [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Received unexpected event network-vif-plugged-f0303e15-89fc-4eb7-825c-9b0cea4b5718 for instance with vm_state building and task_state spawning. [ 1000.427800] env[69927]: DEBUG nova.compute.manager [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Received event network-changed-f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.427943] env[69927]: DEBUG nova.compute.manager [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Refreshing instance network info cache due to event network-changed-f0303e15-89fc-4eb7-825c-9b0cea4b5718. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1000.428145] env[69927]: DEBUG oslo_concurrency.lockutils [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] Acquiring lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.603889] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180195} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.604216] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1000.605274] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328c7935-554c-44b4-b4a4-597be3decc32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.640665] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73/7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.645727] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6d1a87c-2d93-4827-ae4e-0cc3bcbfa8d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.671680] env[69927]: DEBUG oslo_vmware.api [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096253, 'name': PowerOffVM_Task, 'duration_secs': 0.32769} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.672411] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.672598] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.672919] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 1000.672919] env[69927]: value = "task-4096257" [ 1000.672919] env[69927]: _type = "Task" [ 1000.672919] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.675683] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dd69956-3d64-4380-843b-09d44c6440e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.687338] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096257, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.689423] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d73dc7-a404-4f95-917b-ac2f587dd802 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.704141] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34611127-0baa-411f-9189-af8b71c4bcf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.708065] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096254, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.742083] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523e2736-535f-48e8-804d-cba22a49827f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.750253] env[69927]: DEBUG nova.compute.manager [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.750253] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.752258] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20640af-ba7d-4756-948e-524f149d552b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.758480] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c5210e-b8a2-4309-9f76-c1cacc2e2941 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.769391] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.780288] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6f22f8e-5c45-46df-8363-c1d0edfae472 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.783078] env[69927]: DEBUG nova.compute.provider_tree [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1000.793499] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.793858] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.794135] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Deleting the datastore file [datastore1] 74ea584f-b20f-425b-acb3-0ec60e7f2a1e {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.794959] env[69927]: DEBUG oslo_vmware.api [None 
req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5216668c-96d8-e691-8a96-026606ac01f1, 'name': SearchDatastore_Task, 'duration_secs': 0.034645} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.796278] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0ad2219-0c5a-45df-9766-85a4c7916040 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.799108] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.799108] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9c00e485-fd59-4571-abd5-80ca5e3bac1b/9c00e485-fd59-4571-abd5-80ca5e3bac1b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.799956] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bd221ac-585b-4919-b473-894c814353cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.805648] env[69927]: DEBUG oslo_vmware.api [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 1000.805648] env[69927]: value = "task-4096259" [ 1000.805648] env[69927]: _type = "Task" [ 1000.805648] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.812026] env[69927]: DEBUG oslo_vmware.api [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for the task: (returnval){ [ 1000.812026] env[69927]: value = "task-4096260" [ 1000.812026] env[69927]: _type = "Task" [ 1000.812026] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.814188] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1000.814188] env[69927]: value = "task-4096261" [ 1000.814188] env[69927]: _type = "Task" [ 1000.814188] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.828483] env[69927]: DEBUG oslo_vmware.api [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096259, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.835866] env[69927]: DEBUG oslo_vmware.api [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096260, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.839408] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096261, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.848203] env[69927]: DEBUG nova.network.neutron [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1000.893522] env[69927]: DEBUG oslo_vmware.api [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096256, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.961586] env[69927]: DEBUG nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1000.997472] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1000.997749] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.997918] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.998149] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.998312] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.998488] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1000.998707] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1000.998896] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1000.999122] env[69927]: DEBUG nova.virt.hardware [None 
req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1000.999336] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1000.999531] env[69927]: DEBUG nova.virt.hardware [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1001.000731] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220211d3-f5e2-4e45-b528-13504b7061a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.013714] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fec91d7-6944-497e-8a5e-bcc062dc2f4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.188432] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096257, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.202806] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096254, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.932856} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.203193] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.203288] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.203624] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e0b8eb3-4ec2-49b8-baa7-cb17c6a54a43 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.211403] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1001.211403] env[69927]: value = "task-4096262" [ 1001.211403] env[69927]: _type = "Task" [ 1001.211403] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.221137] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096262, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.322805] env[69927]: DEBUG oslo_vmware.api [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096259, 'name': PowerOffVM_Task, 'duration_secs': 0.378687} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.330512] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.330894] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.331341] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a514ea0-3dbc-40c0-a33d-f64684859f14 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.337089] env[69927]: ERROR nova.scheduler.client.report [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [req-9ca7ecde-4c45-4ea2-b32d-473df1eb9e05] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9ca7ecde-4c45-4ea2-b32d-473df1eb9e05"}]} [ 1001.340961] env[69927]: DEBUG nova.network.neutron [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Updated VIF entry in instance network info cache for port 89f92ae9-a8f4-402a-b248-cc3ad9bf67d7. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.340961] env[69927]: DEBUG nova.network.neutron [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Updating instance_info_cache with network_info: [{"id": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "address": "fa:16:3e:c4:9b:91", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89f92ae9-a8", "ovs_interfaceid": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.344492] env[69927]: DEBUG oslo_vmware.api [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Task: {'id': task-4096260, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.390532} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.352032] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.352032] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.352032] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.352032] env[69927]: INFO nova.compute.manager [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Took 1.24 seconds to destroy the instance on the hypervisor. 
[ 1001.352032] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.352032] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096261, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.352032] env[69927]: DEBUG nova.compute.manager [-] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.352032] env[69927]: DEBUG nova.network.neutron [-] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.364347] env[69927]: DEBUG nova.scheduler.client.report [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1001.388604] env[69927]: DEBUG nova.scheduler.client.report [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1001.388812] env[69927]: DEBUG nova.compute.provider_tree [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1001.398272] env[69927]: DEBUG oslo_vmware.api [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.775564} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.398549] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.399089] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.399200] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.399340] env[69927]: INFO nova.compute.manager [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1001.399655] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.400829] env[69927]: DEBUG nova.network.neutron [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Updating instance_info_cache with network_info: [{"id": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "address": "fa:16:3e:2e:8e:93", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0303e15-89", "ovs_interfaceid": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.402126] env[69927]: DEBUG nova.compute.manager [-] [instance: 
c3e8a429-8484-4b11-abe3-1cccf0992556] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.402239] env[69927]: DEBUG nova.network.neutron [-] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.408652] env[69927]: DEBUG nova.scheduler.client.report [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1001.424475] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.424789] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.425040] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Deleting the datastore file [datastore1] 01c8eb3b-bf30-4b00-af71-e32f0dc19171 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.425398] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-323cc8d9-900b-4c67-a7e1-c4b455f38d82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.437379] env[69927]: DEBUG oslo_vmware.api [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for the task: (returnval){ [ 1001.437379] env[69927]: value = "task-4096264" [ 1001.437379] env[69927]: _type = "Task" [ 1001.437379] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.446978] env[69927]: DEBUG oslo_vmware.api [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096264, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.448292] env[69927]: DEBUG nova.scheduler.client.report [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1001.688941] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096257, 'name': ReconfigVM_Task, 'duration_secs': 0.608082} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.688941] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73/7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.690417] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-500d33a6-3f01-4177-9c79-dc715fa2cb38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.697056] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 1001.697056] env[69927]: value = "task-4096265" [ 1001.697056] env[69927]: _type = "Task" [ 1001.697056] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.709375] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096265, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.726215] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096262, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.238706} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.726215] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.729820] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479f93a1-b388-4dbd-9fdb-e0213009ad00 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.759953] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.763561] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33aabfbc-c8b4-4e5d-9b42-66eb2a1245c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.786399] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1001.786399] env[69927]: value = "task-4096266" [ 1001.786399] env[69927]: _type = "Task" [ 1001.786399] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.799376] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096266, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.840705] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.780363} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.840705] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9c00e485-fd59-4571-abd5-80ca5e3bac1b/9c00e485-fd59-4571-abd5-80ca5e3bac1b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.840834] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.841222] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-381266be-ac1e-4874-8e0b-e61dfe209f70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.847924] env[69927]: DEBUG oslo_concurrency.lockutils [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] Releasing lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.850360] env[69927]: DEBUG nova.compute.manager [req-84e02673-b43b-46c9-b462-1e7eaf92ace2 req-1e6f0f9e-c722-4f96-81c6-7cb9eaada2e7 service nova] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Received event network-vif-deleted-efc77bd5-a980-4a4e-9211-70184239a8ee {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1001.854943] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1001.854943] env[69927]: value = "task-4096267" [ 1001.854943] env[69927]: _type = "Task" [ 1001.854943] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.864836] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096267, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.906325] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.907146] env[69927]: DEBUG nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Instance network_info: |[{"id": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "address": "fa:16:3e:2e:8e:93", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0303e15-89", "ovs_interfaceid": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1001.910353] env[69927]: DEBUG oslo_concurrency.lockutils [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] Acquired lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.910796] env[69927]: DEBUG nova.network.neutron [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Refreshing network info cache for port f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.912592] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:8e:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7b7edd0-124a-48ec-ae26-1aa14f9b884a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0303e15-89fc-4eb7-825c-9b0cea4b5718', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.931310] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 
tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.933778] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.934369] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b029b61-6dea-4f83-aa00-058b51305df4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.967460] env[69927]: DEBUG oslo_vmware.api [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Task: {'id': task-4096264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248263} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.969252] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.969457] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.969635] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.969979] env[69927]: INFO nova.compute.manager [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1001.970285] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.970800] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.970800] env[69927]: value = "task-4096268" [ 1001.970800] env[69927]: _type = "Task" [ 1001.970800] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.971018] env[69927]: DEBUG nova.compute.manager [-] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.971085] env[69927]: DEBUG nova.network.neutron [-] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.985336] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096268, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.048676] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4e7dcb-4761-4ef9-a920-224bef103a3f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.060632] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77091e01-209d-4ad8-93bf-6c1a8ece3926 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.103627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac8aded-c1d9-43b3-a639-306bf301c804 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.114120] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44562da-a438-45f1-b979-e674cc2182be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.131849] env[69927]: DEBUG nova.compute.provider_tree [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.210733] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096265, 'name': Rename_Task, 'duration_secs': 0.177058} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.211069] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.211327] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5fb3184-0395-4c82-8c46-6e87d18d9943 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.218696] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 1002.218696] env[69927]: value = "task-4096269" [ 1002.218696] env[69927]: _type = "Task" [ 1002.218696] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.232044] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096269, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.298833] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096266, 'name': ReconfigVM_Task, 'duration_secs': 0.364644} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.298935] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.299731] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d80ec64a-07f4-44eb-9f9c-5ca3372ce4b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.307672] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1002.307672] env[69927]: value = "task-4096270" [ 1002.307672] env[69927]: _type = "Task" [ 1002.307672] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.318266] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096270, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.366349] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07478} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.370022] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.370022] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bf530a-73a6-4535-b1bc-b9522fdf7d4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.393705] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 9c00e485-fd59-4571-abd5-80ca5e3bac1b/9c00e485-fd59-4571-abd5-80ca5e3bac1b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.394061] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caf3b852-0f9b-4057-8bf4-96adeaf68e55 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.416629] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1002.416629] env[69927]: value = "task-4096271" [ 1002.416629] env[69927]: _type = "Task" [ 1002.416629] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.425529] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096271, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.486900] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096268, 'name': CreateVM_Task, 'duration_secs': 0.432659} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.486900] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.488333] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.488333] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.488517] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1002.488729] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd2eb01-1124-40c8-a689-28f72297d23b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.494383] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1002.494383] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529b20d7-ff54-4e3f-c9af-2c6aee4f322f" [ 1002.494383] env[69927]: _type = "Task" [ 1002.494383] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.507024] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529b20d7-ff54-4e3f-c9af-2c6aee4f322f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.520485] env[69927]: DEBUG nova.network.neutron [-] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.541886] env[69927]: DEBUG nova.network.neutron [-] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.610328] env[69927]: DEBUG nova.network.neutron [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Successfully updated port: af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.638988] env[69927]: DEBUG nova.scheduler.client.report [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.732604] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096269, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.792197] env[69927]: DEBUG nova.network.neutron [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Updated VIF entry in instance network info cache for port f0303e15-89fc-4eb7-825c-9b0cea4b5718. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1002.792552] env[69927]: DEBUG nova.network.neutron [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Updating instance_info_cache with network_info: [{"id": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "address": "fa:16:3e:2e:8e:93", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0303e15-89", "ovs_interfaceid": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.817783] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096270, 'name': Rename_Task, 'duration_secs': 0.239377} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.818172] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.818320] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f7b9f0a-57ba-4a29-9d7d-31aa44a47534 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.827171] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1002.827171] env[69927]: value = "task-4096272" [ 1002.827171] env[69927]: _type = "Task" [ 1002.827171] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.839130] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096272, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.927548] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096271, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.008833] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529b20d7-ff54-4e3f-c9af-2c6aee4f322f, 'name': SearchDatastore_Task, 'duration_secs': 0.012568} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.009294] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.009594] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.009877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.010072] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.010384] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.010715] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad9d51a9-685a-40a1-9fd2-d53236832f72 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.021764] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.021964] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1003.023228] env[69927]: INFO nova.compute.manager [-] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Took 1.67 seconds to deallocate network for instance. [ 1003.023330] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a59b747-f0d9-402d-be10-eec30bdbff95 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.032797] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1003.032797] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244575e-664b-d587-d543-f35bf2f465aa" [ 1003.032797] env[69927]: _type = "Task" [ 1003.032797] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.049572] env[69927]: INFO nova.compute.manager [-] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Took 1.65 seconds to deallocate network for instance. [ 1003.049572] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244575e-664b-d587-d543-f35bf2f465aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.085036] env[69927]: DEBUG nova.compute.manager [req-0d288e14-130d-4e6d-8597-8f5846db84ee req-d33cbef3-14c8-41ac-b5f9-3136d7c3b8da service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Received event network-vif-plugged-af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.085306] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d288e14-130d-4e6d-8597-8f5846db84ee req-d33cbef3-14c8-41ac-b5f9-3136d7c3b8da service nova] Acquiring lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.085578] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d288e14-130d-4e6d-8597-8f5846db84ee req-d33cbef3-14c8-41ac-b5f9-3136d7c3b8da service nova] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.085730] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d288e14-130d-4e6d-8597-8f5846db84ee req-d33cbef3-14c8-41ac-b5f9-3136d7c3b8da service nova] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.086961] env[69927]: DEBUG nova.compute.manager [req-0d288e14-130d-4e6d-8597-8f5846db84ee req-d33cbef3-14c8-41ac-b5f9-3136d7c3b8da service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] No waiting events found dispatching network-vif-plugged-af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1003.086961] env[69927]: WARNING nova.compute.manager [req-0d288e14-130d-4e6d-8597-8f5846db84ee req-d33cbef3-14c8-41ac-b5f9-3136d7c3b8da service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Received unexpected event network-vif-plugged-af2d617c-7a43-466f-b19d-3cce0c52c836 for instance with vm_state building and task_state spawning. 
[ 1003.115206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.115206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.115632] env[69927]: DEBUG nova.network.neutron [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.143508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.207s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.144330] env[69927]: DEBUG nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1003.148673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.698s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.149447] env[69927]: DEBUG nova.objects.instance [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1003.153981] env[69927]: DEBUG nova.network.neutron [-] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.235766] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096269, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.244808] env[69927]: DEBUG nova.compute.manager [req-beb4a01a-c743-48a6-bbbc-0edc78719e66 req-47168297-9b51-4777-8165-1c1703d53307 service nova] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Received event network-vif-deleted-866c0d6a-a2a2-45f3-a0e9-06356a7cc46d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.245088] env[69927]: DEBUG nova.compute.manager [req-beb4a01a-c743-48a6-bbbc-0edc78719e66 req-47168297-9b51-4777-8165-1c1703d53307 service nova] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Received event network-vif-deleted-32049b49-d761-48ff-8938-d76ebe86f62e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.296963] env[69927]: DEBUG oslo_concurrency.lockutils [req-a1156c57-196d-49d5-8abe-4c0a9db7674f req-d683ff76-438d-4aa3-a2f5-6c4b3a299df1 service nova] Releasing lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.340916] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096272, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.430420] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096271, 'name': ReconfigVM_Task, 'duration_secs': 0.789511} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.430843] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 9c00e485-fd59-4571-abd5-80ca5e3bac1b/9c00e485-fd59-4571-abd5-80ca5e3bac1b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.431691] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cc55a3e-aa08-4b28-8ca4-578794c4089e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.442419] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1003.442419] env[69927]: value = "task-4096273" [ 1003.442419] env[69927]: _type = "Task" [ 1003.442419] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.454194] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096273, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.463044] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.463380] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.545026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.545730] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5244575e-664b-d587-d543-f35bf2f465aa, 'name': SearchDatastore_Task, 'duration_secs': 0.019647} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.546642] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f1a2b7b-1106-4987-a193-b4e60ca4b821 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.552887] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1003.552887] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5209d260-ec20-6ad7-aef2-1d28aacb1db2" [ 1003.552887] env[69927]: _type = "Task" [ 1003.552887] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.557790] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.564354] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5209d260-ec20-6ad7-aef2-1d28aacb1db2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.657052] env[69927]: DEBUG nova.compute.utils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1003.657547] env[69927]: DEBUG nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1003.657791] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1003.664223] env[69927]: INFO nova.compute.manager [-] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Took 1.69 seconds to deallocate network for instance. [ 1003.686659] env[69927]: DEBUG nova.network.neutron [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.695618] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.695867] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.733485] env[69927]: DEBUG oslo_vmware.api [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096269, 'name': PowerOnVM_Task, 'duration_secs': 1.126451} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.735311] env[69927]: DEBUG nova.policy [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cae7aa9b1b2e4e8f9e8636fe513270f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6be96c1988054f0894a0b91881870c3c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1003.737051] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.737477] env[69927]: INFO nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Took 10.78 seconds to spawn the instance on the hypervisor. [ 1003.737620] env[69927]: DEBUG nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1003.738541] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3daf4edf-0d08-4eeb-a29c-115095229c80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.840536] env[69927]: DEBUG oslo_vmware.api [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096272, 'name': PowerOnVM_Task, 'duration_secs': 0.614288} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.840868] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.841120] env[69927]: DEBUG nova.compute.manager [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1003.841971] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d987398-7733-4c4c-846d-8f941f29ca2f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.913376] env[69927]: DEBUG nova.network.neutron [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.953374] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096273, 'name': Rename_Task, 'duration_secs': 0.324972} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.953941] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.953941] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de672481-d02d-43f0-8c9e-c4afceb245bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.960831] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1003.960831] env[69927]: value = "task-4096274" [ 1003.960831] env[69927]: _type = "Task" [ 1003.960831] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.966365] env[69927]: DEBUG nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1003.972211] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.035985] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Successfully created port: 4c8ce9d5-2863-409f-9eb4-7e69c28015e7 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1004.071623] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5209d260-ec20-6ad7-aef2-1d28aacb1db2, 'name': SearchDatastore_Task, 'duration_secs': 0.01086} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.071623] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.071623] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7/7554b5e2-dcc3-421f-9fe9-a309c9aa03b7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1004.071623] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f6a8356-42ae-45f7-b18b-add2f6405b5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.081402] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1004.081402] env[69927]: value = "task-4096275" [ 1004.081402] env[69927]: _type = "Task" [ 1004.081402] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.089674] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.165349] env[69927]: DEBUG oslo_concurrency.lockutils [None req-505d9180-301b-4ca5-a8da-648d7cd8dda7 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.166966] env[69927]: DEBUG nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1004.171756] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.444s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.171756] env[69927]: DEBUG nova.objects.instance [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lazy-loading 'resources' on Instance uuid 30d9d1ac-4be0-4723-86b5-0aceda88e67b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.175267] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.264452] env[69927]: INFO nova.compute.manager [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Took 67.72 seconds to build instance. [ 1004.366419] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.419157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.419157] env[69927]: DEBUG nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Instance network_info: |[{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 
690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1004.419157] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:1d:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cbd5e0e-9116-46f1-9748-13a73d2d7e75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af2d617c-7a43-466f-b19d-3cce0c52c836', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.434059] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating folder: Project (9cf6bb3492c642aa9a168e484299289c). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.437879] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccc5d2a5-6a14-4a1e-88b2-2a500413aea8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.439084] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Successfully created port: 802f8075-d76e-4020-be35-7cbf0f84fc4a {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1004.454256] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Created folder: Project (9cf6bb3492c642aa9a168e484299289c) in parent group-v811283. [ 1004.454619] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating folder: Instances. Parent ref: group-v811496. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.455016] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-907d9aa7-526c-423f-a1de-0449af3f5cbb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.471330] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Created folder: Instances in parent group-v811496. [ 1004.471662] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.476022] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.476022] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-626b8379-39cb-4de9-8e7e-caaecd2d9b0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.497779] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096274, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.504955] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.504955] env[69927]: value = "task-4096278" [ 1004.504955] env[69927]: _type = "Task" [ 1004.504955] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.515529] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096278, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.522053] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.591844] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096275, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.768625] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1b9e9972-4117-43a3-91cb-d605bfe1fb86 tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.433s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.975127] env[69927]: DEBUG oslo_vmware.api [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096274, 'name': PowerOnVM_Task, 'duration_secs': 0.836488} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.978137] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.978360] env[69927]: INFO nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Took 9.29 seconds to spawn the instance on the hypervisor. [ 1004.978542] env[69927]: DEBUG nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1004.979855] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af78ebc8-7178-4691-ab6b-4d3e82a29359 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.020864] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096278, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.099241] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56804} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.099241] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7/7554b5e2-dcc3-421f-9fe9-a309c9aa03b7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1005.099519] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1005.099894] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d6eca34-998a-4e89-9f84-a5835e3bde79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.108611] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1005.108611] env[69927]: value = "task-4096279" [ 1005.108611] env[69927]: _type = "Task" [ 1005.108611] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.118891] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096279, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.186819] env[69927]: DEBUG nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1005.201844] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c61aa6d-bf3a-4935-ba26-2de5712f7ecd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.212299] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afba87c-fd0c-45b3-be9e-5b876c4bd6e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.253621] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1005.253892] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.254061] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1005.254290] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.254448] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1005.254605] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1005.254842] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1005.254999] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1005.255254] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1005.255338] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1005.255572] env[69927]: DEBUG nova.virt.hardware [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1005.256603] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9405fe-ff68-4135-86bd-e14d30a5a637 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.260064] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d92069c-808b-4318-8100-467852491692 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.272251] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d933718-2884-49be-b456-5451e7f28a18 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.276980] env[69927]: DEBUG nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.280963] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e7fcc0-d225-47f1-808b-7d3a9251fa4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.306693] env[69927]: DEBUG nova.compute.provider_tree [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.502551] env[69927]: INFO nova.compute.manager [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Took 59.87 seconds to build instance. [ 1005.518018] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096278, 'name': CreateVM_Task, 'duration_secs': 0.744086} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.518243] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1005.519033] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.519215] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.519542] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1005.519824] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06b160ad-7d6c-4ea7-99ff-2b6e2a11ff32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.525691] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1005.525691] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525a4915-27aa-5967-05f5-6129331f28af" [ 1005.525691] env[69927]: _type = "Task" [ 1005.525691] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.532582] env[69927]: DEBUG nova.compute.manager [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Received event network-changed-af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.532630] env[69927]: DEBUG nova.compute.manager [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Refreshing instance network info cache due to event network-changed-af2d617c-7a43-466f-b19d-3cce0c52c836. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1005.532863] env[69927]: DEBUG oslo_concurrency.lockutils [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] Acquiring lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.533017] env[69927]: DEBUG oslo_concurrency.lockutils [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] Acquired lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.533189] env[69927]: DEBUG nova.network.neutron [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Refreshing network info cache for port af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.541956] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525a4915-27aa-5967-05f5-6129331f28af, 'name': SearchDatastore_Task, 'duration_secs': 0.010764} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.542290] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.542615] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.542829] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.542994] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.543197] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.543503] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ee0348f-5993-43f1-9535-ef5346219a42 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.561346] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.561500] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.562336] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d375199-3407-4dc5-af9a-d670c9249839 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.569322] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1005.569322] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528958e3-91a4-ca32-44fc-093ef40444f1" [ 1005.569322] env[69927]: _type = "Task" [ 1005.569322] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.578352] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528958e3-91a4-ca32-44fc-093ef40444f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.620818] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079513} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.621155] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.621929] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add6240e-65d2-4584-b250-cf8baa447c39 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.647380] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7/7554b5e2-dcc3-421f-9fe9-a309c9aa03b7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.650857] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a89a70d-235d-428d-b423-da154b9ec6ce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.668088] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1005.668088] env[69927]: value = "task-4096280" [ 1005.668088] env[69927]: _type = "Task" [ 1005.668088] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.677512] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096280, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.691713] env[69927]: DEBUG nova.compute.manager [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Received event network-changed-3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.691713] env[69927]: DEBUG nova.compute.manager [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Refreshing instance network info cache due to event network-changed-3edeb122-1a25-4dcd-93fc-1dcf798a6da1. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1005.691713] env[69927]: DEBUG oslo_concurrency.lockutils [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] Acquiring lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.692107] env[69927]: DEBUG oslo_concurrency.lockutils [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] Acquired lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.692107] env[69927]: DEBUG nova.network.neutron [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Refreshing network info cache for port 3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.809326] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.811491] env[69927]: DEBUG nova.scheduler.client.report [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.003479] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6781f3a0-efb9-4a4e-a5f7-458bf92ae990 
tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.154s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.050896] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Successfully updated port: 4c8ce9d5-2863-409f-9eb4-7e69c28015e7 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1006.084147] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528958e3-91a4-ca32-44fc-093ef40444f1, 'name': SearchDatastore_Task, 'duration_secs': 0.031102} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.088776] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe4315b2-c9d3-4873-98b9-ccc420a27adc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.097461] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1006.097461] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52138476-4b09-832e-e9fc-5d4340a31b2a" [ 1006.097461] env[69927]: _type = "Task" [ 1006.097461] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.110612] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52138476-4b09-832e-e9fc-5d4340a31b2a, 'name': SearchDatastore_Task, 'duration_secs': 0.011626} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.110909] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.111269] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 4b7934f8-2c97-480b-8af7-f09f6819e2b6/4b7934f8-2c97-480b-8af7-f09f6819e2b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.112647] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fb05f5e-13f5-48af-9ce3-a399a6baf65b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.121518] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1006.121518] env[69927]: value = "task-4096281" [ 1006.121518] env[69927]: _type = "Task" [ 1006.121518] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.131431] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096281, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.180536] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096280, 'name': ReconfigVM_Task, 'duration_secs': 0.440067} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.180863] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7/7554b5e2-dcc3-421f-9fe9-a309c9aa03b7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.181588] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45273ce7-3145-4183-8cde-3d350e1bf6fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.190855] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1006.190855] env[69927]: value = "task-4096282" [ 1006.190855] env[69927]: _type = "Task" [ 1006.190855] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.216121] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096282, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.318022] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.320459] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.456s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.322613] env[69927]: INFO nova.compute.claims [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.352029] env[69927]: INFO nova.scheduler.client.report [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleted allocations for instance 30d9d1ac-4be0-4723-86b5-0aceda88e67b [ 1006.438300] env[69927]: INFO nova.compute.manager [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Rebuilding instance [ 1006.461712] env[69927]: DEBUG nova.network.neutron 
[req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updated VIF entry in instance network info cache for port af2d617c-7a43-466f-b19d-3cce0c52c836. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1006.462231] env[69927]: DEBUG nova.network.neutron [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.496352] env[69927]: DEBUG nova.compute.manager [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1006.497674] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0651c7df-12f6-497d-9c6b-3893ebe6c960 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.501988] env[69927]: DEBUG nova.network.neutron [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Updated VIF entry in instance network info cache for port 3edeb122-1a25-4dcd-93fc-1dcf798a6da1. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1006.502398] env[69927]: DEBUG nova.network.neutron [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Updating instance_info_cache with network_info: [{"id": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "address": "fa:16:3e:b9:9c:54", "network": {"id": "efde2a11-36e6-487a-aeb4-7807070b2fe5", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-389765351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71f04d90c8ca48209037157448596060", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5514c5a3-1294-40ad-ae96-29d5c24a3d95", "external-id": "nsx-vlan-transportzone-179", "segmentation_id": 179, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edeb122-1a", "ovs_interfaceid": "3edeb122-1a25-4dcd-93fc-1dcf798a6da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.633476] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096281, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.701690] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096282, 'name': Rename_Task, 'duration_secs': 0.179414} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.701840] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.702116] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7638bfd-de58-4983-aeab-07efc2330d8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.711063] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1006.711063] env[69927]: value = "task-4096283" [ 1006.711063] env[69927]: _type = "Task" [ 1006.711063] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.721127] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096283, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.869752] env[69927]: DEBUG oslo_concurrency.lockutils [None req-05bffbec-c902-4a3f-a33f-66a4dc971bcf tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "30d9d1ac-4be0-4723-86b5-0aceda88e67b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.860s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.966120] env[69927]: DEBUG oslo_concurrency.lockutils [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] Releasing lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.966120] env[69927]: DEBUG nova.compute.manager [req-2a95fdbf-6ea9-4dff-b3cb-a8d2a0983ebe req-d6fa99fc-146a-4e5f-abf4-f3135674fb8a service nova] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Received event network-vif-deleted-09ee92f2-f98d-47b8-81a8-b99cdce409e0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.006290] env[69927]: DEBUG oslo_concurrency.lockutils [req-db2c1531-5dae-4451-bfae-04e1648b0508 req-049fbe24-00c0-44b5-ac7c-8dba30a1d005 service nova] Releasing lock "refresh_cache-7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.133750] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096281, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60667} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.134073] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 4b7934f8-2c97-480b-8af7-f09f6819e2b6/4b7934f8-2c97-480b-8af7-f09f6819e2b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.134329] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.134647] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87c6536e-2dc9-4a31-ae32-a9d25ea05b6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.143418] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1007.143418] env[69927]: value = "task-4096284" [ 1007.143418] env[69927]: _type = "Task" [ 1007.143418] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.153792] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.221975] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096283, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.373465] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.373848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.374037] env[69927]: INFO nova.compute.manager [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Shelving [ 1007.518050] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.518454] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d00381a-a13e-4a54-afc9-4e342ae07874 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.530694] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1007.530694] env[69927]: value = "task-4096285" [ 1007.530694] env[69927]: _type = "Task" [ 1007.530694] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.542471] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096285, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.660406] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15638} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.660695] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.661533] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb626a9-1d09-4663-b2ad-812f4e45a6bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.669400] env[69927]: DEBUG nova.compute.manager [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received event network-vif-plugged-4c8ce9d5-2863-409f-9eb4-7e69c28015e7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.669624] env[69927]: DEBUG oslo_concurrency.lockutils [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] Acquiring lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.669832] env[69927]: DEBUG oslo_concurrency.lockutils [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.669999] env[69927]: DEBUG oslo_concurrency.lockutils [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.670181] env[69927]: DEBUG nova.compute.manager [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] No waiting events found dispatching network-vif-plugged-4c8ce9d5-2863-409f-9eb4-7e69c28015e7 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1007.670349] env[69927]: WARNING nova.compute.manager [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received unexpected event network-vif-plugged-4c8ce9d5-2863-409f-9eb4-7e69c28015e7 for instance with vm_state building and task_state spawning. 
[ 1007.670506] env[69927]: DEBUG nova.compute.manager [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received event network-changed-4c8ce9d5-2863-409f-9eb4-7e69c28015e7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.670661] env[69927]: DEBUG nova.compute.manager [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Refreshing instance network info cache due to event network-changed-4c8ce9d5-2863-409f-9eb4-7e69c28015e7. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1007.670844] env[69927]: DEBUG oslo_concurrency.lockutils [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] Acquiring lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.671017] env[69927]: DEBUG oslo_concurrency.lockutils [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] Acquired lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.671187] env[69927]: DEBUG nova.network.neutron [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Refreshing network info cache for port 4c8ce9d5-2863-409f-9eb4-7e69c28015e7 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.695527] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 4b7934f8-2c97-480b-8af7-f09f6819e2b6/4b7934f8-2c97-480b-8af7-f09f6819e2b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.700211] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ee5c994-2050-4723-bab8-96b9136c3e57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.727393] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096283, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.729186] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1007.729186] env[69927]: value = "task-4096286" [ 1007.729186] env[69927]: _type = "Task" [ 1007.729186] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.742067] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096286, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.901625] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1572b659-55da-4850-b846-d1e496e53b55 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.910815] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685ccced-8d4d-4312-8422-eaa0e3df2bbc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.947607] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07def747-7c5e-455a-90e6-14f6803395ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.956509] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57e05b7-65ff-4fa2-959e-d2c8f6b788eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.975248] env[69927]: DEBUG nova.compute.provider_tree [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.042484] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096285, 'name': PowerOffVM_Task, 'duration_secs': 0.321073} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.042791] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.043049] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.043840] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215ed7f5-d582-4b70-88c6-7141ed2f7747 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.051727] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.051989] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d320d408-e048-4045-80cf-3b569bb84551 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.137044] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.137044] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.137400] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleting the datastore file [datastore2] 256319c4-817d-4267-8531-a65f0f8cd0b6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.137548] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f70b162-2527-463d-8a21-6d70fae50871 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.147756] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1008.147756] env[69927]: value = "task-4096288" [ 1008.147756] env[69927]: _type = "Task" [ 1008.147756] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.158865] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096288, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.183122] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Successfully updated port: 802f8075-d76e-4020-be35-7cbf0f84fc4a {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.227895] env[69927]: DEBUG oslo_vmware.api [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096283, 'name': PowerOnVM_Task, 'duration_secs': 1.237823} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.229110] env[69927]: DEBUG nova.network.neutron [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1008.230769] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1008.230988] env[69927]: INFO nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Took 9.86 seconds to spawn the instance on the hypervisor. [ 1008.231189] env[69927]: DEBUG nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1008.232418] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd7d42c-03ce-4f2a-9efc-c140eb14b8f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.251458] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096286, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.384370] env[69927]: DEBUG nova.network.neutron [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.391248] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.391248] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb2db6ee-7820-45c8-bd3b-ea0834568071 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.399359] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1008.399359] env[69927]: value = "task-4096289" [ 1008.399359] env[69927]: _type = "Task" [ 1008.399359] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.409813] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096289, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.481322] env[69927]: DEBUG nova.scheduler.client.report [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1008.658813] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239939} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.659175] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.659382] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.659566] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.686712] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.746433] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096286, 'name': ReconfigVM_Task, 'duration_secs': 0.611732} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.746433] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 4b7934f8-2c97-480b-8af7-f09f6819e2b6/4b7934f8-2c97-480b-8af7-f09f6819e2b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.746433] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcbb0442-a6eb-4b3f-956c-2f1f263b375c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.758803] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1008.758803] env[69927]: value = "task-4096290" [ 1008.758803] env[69927]: _type = "Task" [ 1008.758803] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.773419] env[69927]: INFO nova.compute.manager [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Took 56.86 seconds to build instance. 
[ 1008.780598] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096290, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.887833] env[69927]: DEBUG oslo_concurrency.lockutils [req-e23b3b0f-2784-4905-a174-6a758559d919 req-1391f82c-99fe-433d-bbd0-f1aeea4e05f2 service nova] Releasing lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.887833] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.887833] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.914118] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096289, 'name': PowerOffVM_Task, 'duration_secs': 0.368818} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.914425] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.915679] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda3204d-ea3c-403a-8932-d9113b749b84 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.941217] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c0180e-6922-4813-a25f-8068729a2943 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.947276] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "0f5643d4-52f3-4cba-b71b-9c4370175e35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.947514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.986209] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.987295] env[69927]: DEBUG nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1008.995310] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 39.063s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.061044] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "c87680be-227e-4a3e-92d3-c2310623bfe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.061324] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c87680be-227e-4a3e-92d3-c2310623bfe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.061545] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "c87680be-227e-4a3e-92d3-c2310623bfe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.061759] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c87680be-227e-4a3e-92d3-c2310623bfe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.062032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock 
"c87680be-227e-4a3e-92d3-c2310623bfe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.066659] env[69927]: INFO nova.compute.manager [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Terminating instance [ 1009.275130] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096290, 'name': Rename_Task, 'duration_secs': 0.174467} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.275130] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.275130] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f9d88ad-7cc0-4549-9ffd-411f6faf1da6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.276924] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1d9731cb-cd08-4942-9a07-d56808f8d2e2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.149s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.282806] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1009.282806] env[69927]: value = "task-4096291" [ 1009.282806] env[69927]: _type = "Task" [ 1009.282806] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.293950] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096291, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.442571] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.453653] env[69927]: DEBUG nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1009.461026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1009.461026] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fd305c3e-9f7a-448f-b8c4-820ec0e7e6fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.475349] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1009.475349] env[69927]: value = "task-4096292" [ 1009.475349] env[69927]: _type = "Task" [ 1009.475349] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.490147] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096292, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.496196] env[69927]: DEBUG nova.compute.utils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1009.499458] env[69927]: DEBUG nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1009.499458] env[69927]: DEBUG nova.network.neutron [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1009.503698] env[69927]: INFO nova.compute.claims [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.571027] env[69927]: DEBUG nova.compute.manager [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.571286] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.572203] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e848ce80-f70e-41ab-a18d-795d7a98d3f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.580453] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.580794] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52f73473-b86e-4749-98eb-5bc69398137b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.586114] env[69927]: DEBUG nova.policy [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9961f0c39b32467b88878373a3374aae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de0b560d18954fd68f7eceeb96c37055', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1009.664258] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.664665] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.665446] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Deleting the datastore file [datastore1] c87680be-227e-4a3e-92d3-c2310623bfe4 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.665814] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ebc3570-8e12-4421-84a1-eceaeb89eba5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.675882] env[69927]: DEBUG oslo_vmware.api [None 
req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 1009.675882] env[69927]: value = "task-4096294" [ 1009.675882] env[69927]: _type = "Task" [ 1009.675882] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.688641] env[69927]: DEBUG oslo_vmware.api [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096294, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.709016] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.709281] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.709403] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.709880] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.709880] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.710032] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.710156] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e 
tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.710339] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.710530] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.710709] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.710908] env[69927]: DEBUG nova.virt.hardware [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.711892] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6e5296-4529-40f4-9379-e8add1f51615 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.725132] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d317a0-6324-4daf-beea-5d0744ad52cb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.731320] env[69927]: DEBUG nova.compute.manager [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received event network-vif-plugged-802f8075-d76e-4020-be35-7cbf0f84fc4a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.731563] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] Acquiring lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.731787] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.731974] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] Lock 
"93d19a66-f00e-4fa8-9eed-32035b020ba2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.732172] env[69927]: DEBUG nova.compute.manager [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] No waiting events found dispatching network-vif-plugged-802f8075-d76e-4020-be35-7cbf0f84fc4a {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1009.732343] env[69927]: WARNING nova.compute.manager [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received unexpected event network-vif-plugged-802f8075-d76e-4020-be35-7cbf0f84fc4a for instance with vm_state building and task_state spawning. [ 1009.734041] env[69927]: DEBUG nova.compute.manager [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received event network-changed-802f8075-d76e-4020-be35-7cbf0f84fc4a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.734295] env[69927]: DEBUG nova.compute.manager [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Refreshing instance network info cache due to event network-changed-802f8075-d76e-4020-be35-7cbf0f84fc4a. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1009.734500] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] Acquiring lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.752127] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:23:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c8a5d7c-ee1f-4a41-94e4-db31e85a398d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85825835-357a-42a3-81f4-b55d7e165b65', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1009.763457] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.764904] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1009.765435] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-605b56fc-ca28-43f4-84d9-18a2bbdce31f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.788958] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1009.788958] env[69927]: value = "task-4096295" [ 1009.788958] env[69927]: _type = "Task" [ 1009.788958] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.798038] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096291, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.803743] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096295, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.985968] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096292, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.990413] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.002248] env[69927]: DEBUG nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1010.014600] env[69927]: INFO nova.compute.resource_tracker [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating resource usage from migration 9ad951cc-c116-4a21-8eba-8383479bc85e [ 1010.033774] env[69927]: DEBUG nova.network.neutron [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Updating instance_info_cache with network_info: [{"id": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "address": "fa:16:3e:e6:8c:4c", "network": {"id": "04ed005d-e7b5-498b-ae86-d5973a49b209", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415626569", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8ce9d5-28", "ovs_interfaceid": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "802f8075-d76e-4020-be35-7cbf0f84fc4a", "address": "fa:16:3e:ff:33:59", "network": {"id": "2b58c05e-ed4f-47a2-9cc3-5d6094954d47", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-95989038", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9643129c-1d95-4422-9df1-2c21289bd5d6", "external-id": "nsx-vlan-transportzone-917", "segmentation_id": 917, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap802f8075-d7", "ovs_interfaceid": "802f8075-d76e-4020-be35-7cbf0f84fc4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.084978] env[69927]: DEBUG nova.network.neutron [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Successfully created port: 0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1010.195296] 
env[69927]: DEBUG oslo_vmware.api [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096294, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20718} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.198371] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.198618] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.198758] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.198931] env[69927]: INFO nova.compute.manager [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1010.199208] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.199614] env[69927]: DEBUG nova.compute.manager [-] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1010.199713] env[69927]: DEBUG nova.network.neutron [-] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1010.298739] env[69927]: DEBUG oslo_vmware.api [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096291, 'name': PowerOnVM_Task, 'duration_secs': 0.517706} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.299154] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.299343] env[69927]: INFO nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Took 9.34 seconds to spawn the instance on the hypervisor. [ 1010.299781] env[69927]: DEBUG nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1010.302978] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d5fcb0-b89c-405b-9587-9df79b41c949 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.308929] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096295, 'name': CreateVM_Task, 'duration_secs': 0.400068} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.309540] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1010.310292] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.310471] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.310783] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1010.311057] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2c9dd36-b3be-45af-aa56-c5748419827c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.328900] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: 
(returnval){ [ 1010.328900] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b4ee72-4c03-1de0-564f-cc5ce345f5b7" [ 1010.328900] env[69927]: _type = "Task" [ 1010.328900] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.338286] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b4ee72-4c03-1de0-564f-cc5ce345f5b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.486657] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096292, 'name': CreateSnapshot_Task, 'duration_secs': 0.6485} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.486991] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1010.487820] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3a8150-f2b0-479d-a37a-f933c12c9cc5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.539622] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Releasing lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.539622] env[69927]: DEBUG nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Instance network_info: |[{"id": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "address": "fa:16:3e:e6:8c:4c", "network": {"id": "04ed005d-e7b5-498b-ae86-d5973a49b209", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415626569", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8ce9d5-28", "ovs_interfaceid": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "802f8075-d76e-4020-be35-7cbf0f84fc4a", "address": "fa:16:3e:ff:33:59", "network": {"id": "2b58c05e-ed4f-47a2-9cc3-5d6094954d47", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-95989038", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9643129c-1d95-4422-9df1-2c21289bd5d6", "external-id": "nsx-vlan-transportzone-917", "segmentation_id": 917, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap802f8075-d7", "ovs_interfaceid": "802f8075-d76e-4020-be35-7cbf0f84fc4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1010.540151] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] Acquired lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.540352] env[69927]: DEBUG nova.network.neutron [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Refreshing network info cache for port 802f8075-d76e-4020-be35-7cbf0f84fc4a {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.541580] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:8c:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c8ce9d5-2863-409f-9eb4-7e69c28015e7', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:33:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9643129c-1d95-4422-9df1-2c21289bd5d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '802f8075-d76e-4020-be35-7cbf0f84fc4a', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.552564] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.554884] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.557852] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20f428c2-9edd-4c68-9ccf-10d1085cc49d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.582814] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.582814] env[69927]: value = "task-4096296" [ 1010.582814] env[69927]: _type = "Task" [ 1010.582814] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.591805] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096296, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.594173] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840c6f2d-1de7-4831-9d15-42766a6a8d7a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.604027] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e935b0-7488-4444-96ab-5821925b90ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.642305] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a162a8-c6e0-4c8d-8049-b8d84f975b3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.652942] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb45937-da44-4015-a29a-dd2c24b2e236 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.668411] env[69927]: DEBUG nova.compute.provider_tree [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.838509] env[69927]: DEBUG nova.compute.manager [req-ce53f724-7eae-4dbb-8e0f-3c952a808590 req-a1c4ba26-5a97-4e98-9d3f-ed016db4cce1 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Received event network-vif-deleted-a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1010.838509] env[69927]: INFO nova.compute.manager [req-ce53f724-7eae-4dbb-8e0f-3c952a808590 req-a1c4ba26-5a97-4e98-9d3f-ed016db4cce1 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Neutron deleted interface a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c; detaching it from the instance and deleting it from the info cache [ 1010.838509] env[69927]: DEBUG nova.network.neutron [req-ce53f724-7eae-4dbb-8e0f-3c952a808590 req-a1c4ba26-5a97-4e98-9d3f-ed016db4cce1 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Updating instance_info_cache with network_info: [] 
{{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.839873] env[69927]: INFO nova.compute.manager [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Took 55.77 seconds to build instance. [ 1010.844679] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b4ee72-4c03-1de0-564f-cc5ce345f5b7, 'name': SearchDatastore_Task, 'duration_secs': 0.022327} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.845305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.845531] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1010.845760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.845983] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.846103] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.846462] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-307735bf-9212-46ea-bcb9-f871198e0fab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.856655] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.856856] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1010.857620] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfafb088-6aba-4862-9288-5afdff61da69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.864232] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1010.864232] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e60a3b-e63b-261a-e827-baf26eb11046" [ 1010.864232] env[69927]: _type = "Task" [ 1010.864232] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.873181] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e60a3b-e63b-261a-e827-baf26eb11046, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.010922] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1011.011420] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7d579bf7-920a-4e0c-a870-12b80d10b5fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.017061] env[69927]: DEBUG nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1011.027294] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1011.027294] env[69927]: value = "task-4096297" [ 1011.027294] env[69927]: _type = "Task" [ 1011.027294] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.036891] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096297, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.055487] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1011.055736] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.055901] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1011.056116] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.056276] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1011.056425] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1011.056682] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1011.056881] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1011.057120] env[69927]: DEBUG nova.virt.hardware [None 
req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1011.057306] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1011.057488] env[69927]: DEBUG nova.virt.hardware [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1011.061032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2b9cc1-64bb-4341-ae34-1c14ed6b165a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.071485] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49def3a7-00e9-467d-b2aa-84cb5593a6fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.097869] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096296, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.171686] env[69927]: DEBUG nova.scheduler.client.report [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.176543] env[69927]: DEBUG nova.network.neutron [-] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.346553] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1449223b-d767-473c-afe2-b9d5a7bc7cf5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.255s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.349927] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ffd709fa-294d-4496-9ec1-31a49702851f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.360719] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7df398-41d8-48b8-930a-c30d8d96dae9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.383205] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e60a3b-e63b-261a-e827-baf26eb11046, 'name': SearchDatastore_Task, 'duration_secs': 0.017496} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.384066] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-749eeb2a-5cca-43e3-a74d-9031decbf152 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.401329] env[69927]: DEBUG nova.compute.manager [req-ce53f724-7eae-4dbb-8e0f-3c952a808590 req-a1c4ba26-5a97-4e98-9d3f-ed016db4cce1 service nova] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Detach interface failed, port_id=a4ba62a2-72b5-465c-bd2e-9ff3bff9da1c, reason: Instance c87680be-227e-4a3e-92d3-c2310623bfe4 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1011.403332] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1011.403332] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f4e11c-d108-5063-c60e-59fc9d4a5cdf" [ 1011.403332] env[69927]: _type = "Task" [ 1011.403332] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.412879] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f4e11c-d108-5063-c60e-59fc9d4a5cdf, 'name': SearchDatastore_Task, 'duration_secs': 0.014387} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.413280] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.414589] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1011.414932] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7500d12-63c1-418c-830e-346f0ad5431c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.425755] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1011.425755] env[69927]: value = "task-4096298" [ 1011.425755] env[69927]: _type = "Task" [ 1011.425755] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.436114] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096298, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.495285] env[69927]: DEBUG nova.network.neutron [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Updated VIF entry in instance network info cache for port 802f8075-d76e-4020-be35-7cbf0f84fc4a. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.495992] env[69927]: DEBUG nova.network.neutron [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Updating instance_info_cache with network_info: [{"id": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "address": "fa:16:3e:e6:8c:4c", "network": {"id": "04ed005d-e7b5-498b-ae86-d5973a49b209", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415626569", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8ce9d5-28", "ovs_interfaceid": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "802f8075-d76e-4020-be35-7cbf0f84fc4a", "address": "fa:16:3e:ff:33:59", "network": {"id": "2b58c05e-ed4f-47a2-9cc3-5d6094954d47", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-95989038", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9643129c-1d95-4422-9df1-2c21289bd5d6", "external-id": "nsx-vlan-transportzone-917", "segmentation_id": 917, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap802f8075-d7", "ovs_interfaceid": "802f8075-d76e-4020-be35-7cbf0f84fc4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.541543] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096297, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.600531] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096296, 'name': CreateVM_Task, 'duration_secs': 0.72275} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.601395] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.602413] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.602643] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.603130] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1011.603567] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45502383-d089-4957-9774-25ff5e88de04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.609950] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1011.609950] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b1c75-9ec2-a480-fa9e-3636a3a31f77" [ 1011.609950] env[69927]: _type = "Task" [ 1011.609950] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.621157] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b1c75-9ec2-a480-fa9e-3636a3a31f77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.679647] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.684s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.682186] env[69927]: INFO nova.compute.manager [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Migrating [ 1011.690575] env[69927]: INFO nova.compute.manager [-] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Took 1.49 seconds to deallocate network for instance. [ 1011.690575] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.390s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.690575] env[69927]: DEBUG nova.objects.instance [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lazy-loading 'resources' on Instance uuid 27e20d58-1150-4b90-b888-d84aff1954ef {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.798453] env[69927]: DEBUG nova.network.neutron [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Successfully updated port: 0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1011.835362] env[69927]: DEBUG nova.compute.manager [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Received event network-changed-f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.835786] env[69927]: DEBUG nova.compute.manager [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Refreshing instance network info cache due to event network-changed-f0303e15-89fc-4eb7-825c-9b0cea4b5718. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1011.836212] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Acquiring lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.836715] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Acquired lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.836915] env[69927]: DEBUG nova.network.neutron [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Refreshing network info cache for port f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1011.940810] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096298, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.004741] env[69927]: DEBUG oslo_concurrency.lockutils [req-4ea956dc-0404-40df-a682-84fa0d134c90 req-13eca288-eee1-4cba-83b0-3cb0de3af6e6 service nova] Releasing lock "refresh_cache-93d19a66-f00e-4fa8-9eed-32035b020ba2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.039244] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096297, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.122048] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b1c75-9ec2-a480-fa9e-3636a3a31f77, 'name': SearchDatastore_Task, 'duration_secs': 0.012779} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.122224] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.122461] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.122702] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.122848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.123040] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.123316] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c1c8e6d-aec5-4a97-b3ba-ae0732a48df0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.135270] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.135472] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.136222] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-659f17ce-ddc5-47e4-9101-c0041906b2c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.143743] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1012.143743] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52001620-ac50-57ca-ee7e-5c0fa353cbb3" [ 1012.143743] env[69927]: _type = "Task" [ 1012.143743] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.153791] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52001620-ac50-57ca-ee7e-5c0fa353cbb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.215818] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.216889] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.216889] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.217156] env[69927]: DEBUG nova.network.neutron [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.301030] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.301171] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.301277] env[69927]: DEBUG nova.network.neutron [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.440908] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096298, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577412} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.441211] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1012.441477] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1012.441691] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb79c580-9cfa-4722-b77f-0f99da670c06 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.451701] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1012.451701] env[69927]: value = "task-4096299" [ 1012.451701] env[69927]: _type = "Task" [ 1012.451701] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.461628] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096299, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.545896] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096297, 'name': CloneVM_Task, 'duration_secs': 1.502858} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.547310] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Created linked-clone VM from snapshot [ 1012.548083] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013ba7b9-c3c3-45ea-b9a5-bd1580db9a2e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.557455] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Uploading image 03c12814-dab7-40e7-ab90-e04f02e070f6 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1012.596580] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1012.596580] env[69927]: value = "vm-811502" [ 1012.596580] env[69927]: _type = "VirtualMachine" [ 1012.596580] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1012.596973] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-be6e0c58-5f23-4d18-bb59-01e4b2662f14 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.605423] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lease: (returnval){ [ 1012.605423] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2430a-c34f-941b-5e41-62d5980790be" [ 1012.605423] env[69927]: _type = "HttpNfcLease" [ 1012.605423] env[69927]: } obtained for exporting VM: (result){ [ 1012.605423] env[69927]: value = "vm-811502" [ 1012.605423] env[69927]: _type = "VirtualMachine" [ 1012.605423] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1012.605661] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the lease: (returnval){ [ 1012.605661] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2430a-c34f-941b-5e41-62d5980790be" [ 1012.605661] env[69927]: _type = "HttpNfcLease" [ 1012.605661] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1012.609538] env[69927]: DEBUG nova.network.neutron [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Updated VIF entry in instance network info cache for port f0303e15-89fc-4eb7-825c-9b0cea4b5718. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1012.609889] env[69927]: DEBUG nova.network.neutron [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Updating instance_info_cache with network_info: [{"id": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "address": "fa:16:3e:2e:8e:93", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0303e15-89", "ovs_interfaceid": "f0303e15-89fc-4eb7-825c-9b0cea4b5718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.614898] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1012.614898] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2430a-c34f-941b-5e41-62d5980790be" [ 1012.614898] env[69927]: _type = "HttpNfcLease" [ 1012.614898] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1012.658867] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52001620-ac50-57ca-ee7e-5c0fa353cbb3, 'name': SearchDatastore_Task, 'duration_secs': 0.064607} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.659740] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-635f4e84-6848-431f-bd9d-1838cf9eeb48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.666036] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1012.666036] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f51c46-805a-fa65-750c-df9c4d0b11ca" [ 1012.666036] env[69927]: _type = "Task" [ 1012.666036] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.677537] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f51c46-805a-fa65-750c-df9c4d0b11ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.694672] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb45c9da-0082-4b54-8d28-9c10857fb295 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.705052] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1231703-c99e-4cb9-bb20-8f2ce5fedba7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.738100] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf088fa-fae0-4850-9f39-961145c3b43a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.748591] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496016aa-b8b9-408e-aa41-b1339c9b7121 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.763601] env[69927]: DEBUG nova.compute.provider_tree [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.840832] env[69927]: DEBUG nova.network.neutron [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1012.949230] env[69927]: DEBUG nova.network.neutron [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance_info_cache with network_info: [{"id": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "address": "fa:16:3e:f8:cb:f0", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc4d69e0-0a", "ovs_interfaceid": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.964026] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096299, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091958} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.964026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1012.964026] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c074140-8124-49a0-b0f5-ba5cc1486d5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.989030] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.989030] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf7303fa-8a9a-4d07-9439-9add355ac484 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.017260] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1013.017260] env[69927]: value = "task-4096301" [ 1013.017260] env[69927]: _type = "Task" [ 1013.017260] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.030163] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096301, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.114867] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Releasing lock "refresh_cache-7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.115240] env[69927]: DEBUG nova.compute.manager [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Received event network-changed-af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1013.115346] env[69927]: DEBUG nova.compute.manager [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Refreshing instance network info cache due to event network-changed-af2d617c-7a43-466f-b19d-3cce0c52c836. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1013.115562] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Acquiring lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.115794] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Acquired lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.116033] env[69927]: DEBUG nova.network.neutron [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Refreshing network info cache for port af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1013.117363] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1013.117363] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2430a-c34f-941b-5e41-62d5980790be" [ 1013.117363] env[69927]: _type = "HttpNfcLease" [ 1013.117363] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1013.121052] env[69927]: DEBUG nova.network.neutron [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updating instance_info_cache with network_info: [{"id": "0083dc02-3370-427b-bd94-c2267d234d68", "address": "fa:16:3e:6f:14:cb", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0083dc02-33", "ovs_interfaceid": "0083dc02-3370-427b-bd94-c2267d234d68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.121052] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1013.121052] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2430a-c34f-941b-5e41-62d5980790be" [ 1013.121052] env[69927]: _type = "HttpNfcLease" [ 1013.121052] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1013.121320] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd6fdb4-b071-4299-9465-ae2435533265 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.131462] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d338a9-4fcc-2f60-c306-06b3f5f342c9/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1013.131761] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d338a9-4fcc-2f60-c306-06b3f5f342c9/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1013.200844] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f51c46-805a-fa65-750c-df9c4d0b11ca, 'name': SearchDatastore_Task, 'duration_secs': 0.079275} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.201293] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.201797] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 93d19a66-f00e-4fa8-9eed-32035b020ba2/93d19a66-f00e-4fa8-9eed-32035b020ba2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1013.202160] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-729a33f1-a501-4eda-99fc-aa2eb89db748 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.211355] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1013.211355] env[69927]: value = "task-4096302" [ 1013.211355] env[69927]: _type = "Task" [ 1013.211355] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.221240] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096302, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.231809] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-331e1d51-0a89-4cb8-bb4f-c5d7b95307af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.267347] env[69927]: DEBUG nova.scheduler.client.report [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.451490] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.531760] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096301, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.623568] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.623726] env[69927]: DEBUG nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Instance network_info: |[{"id": "0083dc02-3370-427b-bd94-c2267d234d68", "address": "fa:16:3e:6f:14:cb", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0083dc02-33", "ovs_interfaceid": "0083dc02-3370-427b-bd94-c2267d234d68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1013.624349] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:14:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0083dc02-3370-427b-bd94-c2267d234d68', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1013.634210] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1013.635256] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1013.636549] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a0e009d-78e5-4f12-aee7-54a5409eb8cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.662226] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1013.662226] env[69927]: value = "task-4096303" [ 1013.662226] env[69927]: _type = "Task" [ 1013.662226] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.678506] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096303, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.726782] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096302, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.773603] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.779202] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.287s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.779618] env[69927]: DEBUG nova.objects.instance [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lazy-loading 'resources' on Instance uuid 95c02aa2-d587-4c9f-9b02-2992dfe5b1be {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.807578] env[69927]: INFO nova.scheduler.client.report [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Deleted allocations for instance 27e20d58-1150-4b90-b888-d84aff1954ef [ 1014.033908] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096301, 'name': ReconfigVM_Task, 'duration_secs': 0.597542} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.034904] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 256319c4-817d-4267-8531-a65f0f8cd0b6/256319c4-817d-4267-8531-a65f0f8cd0b6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.036539] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ecc88c7-5107-4604-828e-730692758217 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.045980] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1014.045980] env[69927]: value = "task-4096304" [ 1014.045980] env[69927]: _type = "Task" [ 1014.045980] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.061612] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096304, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.068851] env[69927]: DEBUG nova.network.neutron [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updated VIF entry in instance network info cache for port af2d617c-7a43-466f-b19d-3cce0c52c836. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1014.069278] env[69927]: DEBUG nova.network.neutron [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.078404] env[69927]: DEBUG nova.compute.manager [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Received event network-changed-0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.078404] env[69927]: DEBUG nova.compute.manager [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Refreshing instance network info cache due to event network-changed-0083dc02-3370-427b-bd94-c2267d234d68. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1014.078404] env[69927]: DEBUG oslo_concurrency.lockutils [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] Acquiring lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.078404] env[69927]: DEBUG oslo_concurrency.lockutils [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] Acquired lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.078404] env[69927]: DEBUG nova.network.neutron [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Refreshing network info cache for port 0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1014.175850] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096303, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.225576] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559664} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.226139] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 93d19a66-f00e-4fa8-9eed-32035b020ba2/93d19a66-f00e-4fa8-9eed-32035b020ba2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1014.226139] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1014.226275] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8e161eb-7ba0-408d-b65c-48057f0186a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.236350] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1014.236350] env[69927]: value = "task-4096305" [ 1014.236350] env[69927]: _type = "Task" [ 1014.236350] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.247564] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096305, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.318214] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7d5843a0-aa38-445d-8241-9c4a16f59d31 tempest-ServerShowV257Test-1753666914 tempest-ServerShowV257Test-1753666914-project-member] Lock "27e20d58-1150-4b90-b888-d84aff1954ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.346s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.558271] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096304, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.574950] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Releasing lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.575263] env[69927]: DEBUG nova.compute.manager [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Received event network-vif-plugged-0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.575460] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.575671] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.575848] env[69927]: DEBUG oslo_concurrency.lockutils [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.576452] env[69927]: DEBUG nova.compute.manager [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] No waiting events found dispatching network-vif-plugged-0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:321}} [ 1014.576839] env[69927]: WARNING nova.compute.manager [req-2b06886b-dd26-4f0c-adf7-3e5d22b79b5d req-4a356d4c-d504-4098-a401-827716233b17 service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Received unexpected event network-vif-plugged-0083dc02-3370-427b-bd94-c2267d234d68 for instance with vm_state building and task_state spawning. [ 1014.675561] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096303, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.744679] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00b57c1-c9aa-4007-ab75-2c63dff164ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.752157] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096305, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081559} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.753061] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.753927] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3ba105-f467-4b6d-8760-3b576a26a47f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.760143] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf20c19-d025-4508-bcfb-521a9d63f37f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.785567] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 93d19a66-f00e-4fa8-9eed-32035b020ba2/93d19a66-f00e-4fa8-9eed-32035b020ba2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.789596] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2029a478-6e7c-4e5d-b376-84af2262b57a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.838719] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4103877c-62d5-4b13-9a5c-fb2b7f2650ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.843893] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1014.843893] env[69927]: value = "task-4096306" [ 1014.843893] env[69927]: 
_type = "Task" [ 1014.843893] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.852318] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb18845d-2df1-43c1-8317-2d71dca234d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.860269] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096306, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.873056] env[69927]: DEBUG nova.compute.provider_tree [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.912500] env[69927]: DEBUG nova.network.neutron [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updated VIF entry in instance network info cache for port 0083dc02-3370-427b-bd94-c2267d234d68. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1014.912868] env[69927]: DEBUG nova.network.neutron [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updating instance_info_cache with network_info: [{"id": "0083dc02-3370-427b-bd94-c2267d234d68", "address": "fa:16:3e:6f:14:cb", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0083dc02-33", "ovs_interfaceid": "0083dc02-3370-427b-bd94-c2267d234d68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.967349] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c542d35-e4c4-4911-ad6f-d4dce82f5ed2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.990609] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 
tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance '5581f8af-9796-48ad-a2f3-557e90d9662a' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1015.060268] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096304, 'name': Rename_Task, 'duration_secs': 0.752648} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.060635] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.060955] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-490b63d6-d6ba-455f-87a1-df3dafd1ffff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.069257] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1015.069257] env[69927]: value = "task-4096307" [ 1015.069257] env[69927]: _type = "Task" [ 1015.069257] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.079432] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096307, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.175042] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096303, 'name': CreateVM_Task, 'duration_secs': 1.438947} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.175151] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.175869] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.176109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.176421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1015.176748] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2db9f94-2244-4ca6-aca5-894594237d04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.182648] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1015.182648] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522352c5-d098-4f0e-888b-89924aa5fd40" [ 1015.182648] env[69927]: _type = "Task" [ 1015.182648] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.192611] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522352c5-d098-4f0e-888b-89924aa5fd40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.355192] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096306, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.376898] env[69927]: DEBUG nova.scheduler.client.report [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1015.419667] env[69927]: DEBUG oslo_concurrency.lockutils [req-1122f180-f2c4-4f8a-b371-68a3f99754f9 req-5a4d4389-bd6e-4b56-9d1f-2dca4a67e36b service nova] Releasing lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.499881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1015.500413] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a500e75-7474-4647-9698-fb07578196aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.509893] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1015.509893] env[69927]: value = "task-4096308" [ 1015.509893] env[69927]: _type = "Task" [ 1015.509893] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.519641] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.589276] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096307, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.694497] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522352c5-d098-4f0e-888b-89924aa5fd40, 'name': SearchDatastore_Task, 'duration_secs': 0.012031} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.694814] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.695078] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.695334] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.695486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.695671] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.695977] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6b17a89-6b8f-493d-8893-eed1800789f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.711055] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.711298] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.712087] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24be1cf5-8341-44d1-8514-f5766107aa7a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.718929] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1015.718929] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aa1589-220e-7741-6885-44c2979522ce" [ 1015.718929] env[69927]: _type = "Task" [ 1015.718929] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.730096] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aa1589-220e-7741-6885-44c2979522ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.855940] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096306, 'name': ReconfigVM_Task, 'duration_secs': 0.536464} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.856230] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 93d19a66-f00e-4fa8-9eed-32035b020ba2/93d19a66-f00e-4fa8-9eed-32035b020ba2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.857033] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd90369a-8703-49f1-bee3-331c4d134b70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.863793] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1015.863793] env[69927]: value = "task-4096309" [ 1015.863793] env[69927]: _type = "Task" [ 1015.863793] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.874040] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096309, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.883247] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.885843] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.729s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.886104] env[69927]: DEBUG nova.objects.instance [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'resources' on Instance uuid a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.000570] env[69927]: INFO nova.scheduler.client.report [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Deleted allocations for instance 95c02aa2-d587-4c9f-9b02-2992dfe5b1be [ 1016.019854] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096308, 'name': PowerOffVM_Task, 'duration_secs': 0.247807} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.020256] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1016.020460] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance '5581f8af-9796-48ad-a2f3-557e90d9662a' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1016.080227] env[69927]: DEBUG oslo_vmware.api [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096307, 'name': PowerOnVM_Task, 'duration_secs': 0.792369} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.080227] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.080227] env[69927]: DEBUG nova.compute.manager [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1016.080960] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7550a4e8-bc3c-435a-b2d1-b2a6da4d29b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.232259] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52aa1589-220e-7741-6885-44c2979522ce, 'name': SearchDatastore_Task, 'duration_secs': 0.016418} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.233096] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-957e6e96-9be1-4c47-9efd-6475b116cc92 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.239886] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1016.239886] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5296a3d3-a00f-1644-b46a-b9be94218269" [ 1016.239886] env[69927]: _type = "Task" [ 1016.239886] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.250049] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5296a3d3-a00f-1644-b46a-b9be94218269, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.375118] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096309, 'name': Rename_Task, 'duration_secs': 0.288272} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.375421] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.375780] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d96c29e-b513-442c-a4ba-0d274d3e8b91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.384652] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1016.384652] env[69927]: value = "task-4096310" [ 1016.384652] env[69927]: _type = "Task" [ 1016.384652] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.388913] env[69927]: DEBUG nova.objects.instance [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'numa_topology' on Instance uuid a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.396334] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096310, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.508855] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b4b4f71-6125-47bf-8971-dfa1f9ac5106 tempest-VolumesAdminNegativeTest-1491739297 tempest-VolumesAdminNegativeTest-1491739297-project-member] Lock "95c02aa2-d587-4c9f-9b02-2992dfe5b1be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.633s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.527755] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1016.528018] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.528188] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1016.528378] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.528526] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1016.528681] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1016.528897] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1016.529070] env[69927]: DEBUG 
nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1016.529252] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1016.529419] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1016.529592] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1016.535103] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad515412-2cf4-4bb9-9ea0-86beaf64e11f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.553384] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1016.553384] env[69927]: value = "task-4096311" [ 1016.553384] env[69927]: _type = "Task" [ 1016.553384] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.563736] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.600306] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.753247] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5296a3d3-a00f-1644-b46a-b9be94218269, 'name': SearchDatastore_Task, 'duration_secs': 0.014938} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.753702] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.753994] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 0c8e43a3-3f33-4a41-81d3-a98565dca4a7/0c8e43a3-3f33-4a41-81d3-a98565dca4a7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1016.754282] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d8f21dd-e9fb-449d-b747-4cec7d17ca07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.762197] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1016.762197] env[69927]: value = "task-4096312" [ 1016.762197] env[69927]: _type = "Task" [ 1016.762197] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.773434] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096312, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.896158] env[69927]: DEBUG nova.objects.base [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1016.899026] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096310, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.064062] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096311, 'name': ReconfigVM_Task, 'duration_secs': 0.348281} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.067145] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance '5581f8af-9796-48ad-a2f3-557e90d9662a' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1017.279700] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096312, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.394402] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f62075-35d4-441d-aae6-dd504a22f1c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.405954] env[69927]: DEBUG oslo_vmware.api [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096310, 'name': PowerOnVM_Task, 'duration_secs': 0.962556} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.407201] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.407738] env[69927]: INFO nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Took 12.22 seconds to spawn the instance on the hypervisor. 
[ 1017.408643] env[69927]: DEBUG nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1017.409745] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a26c81-1577-4f5e-8920-34a05a8bdfed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.419032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40605a74-bd19-45c6-9baf-efdc6e5bad0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.457825] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7f7d0c-654e-4cdd-abde-6f963be49afa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.468231] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ce67d0-b351-432e-9841-f1f56f055cfa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.488100] env[69927]: DEBUG nova.compute.provider_tree [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.575264] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1017.575776] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1017.575776] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1017.575977] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] 
Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1017.576153] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1017.576351] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1017.576578] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1017.576827] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1017.577049] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1017.577261] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1017.577444] env[69927]: DEBUG nova.virt.hardware [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1017.584234] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Reconfiguring VM instance instance-00000041 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1017.584706] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae7e220c-5bad-43d2-aeea-2713804b7316 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.606397] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1017.606397] env[69927]: value = "task-4096313" [ 1017.606397] env[69927]: 
_type = "Task" [ 1017.606397] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.616431] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "67e00c40-35b6-4a9f-9505-19b804e78c04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.616620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.616875] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "67e00c40-35b6-4a9f-9505-19b804e78c04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.617089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.617352] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.619513] env[69927]: INFO nova.compute.manager [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Terminating instance [ 1017.622442] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.781191] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723675} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.781594] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 0c8e43a3-3f33-4a41-81d3-a98565dca4a7/0c8e43a3-3f33-4a41-81d3-a98565dca4a7.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.781926] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.782279] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ab222ca-858d-4b2a-9cc0-5f8ac778dd14 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.792179] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1017.792179] env[69927]: value = "task-4096314" [ 1017.792179] env[69927]: _type = "Task" [ 1017.792179] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.805704] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096314, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.969581] env[69927]: INFO nova.compute.manager [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Took 60.56 seconds to build instance. [ 1017.992911] env[69927]: DEBUG nova.scheduler.client.report [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.116813] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096313, 'name': ReconfigVM_Task, 'duration_secs': 0.334409} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.117120] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Reconfigured VM instance instance-00000041 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1018.117939] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c28962e-0c19-4fe8-881d-054fd74dc552 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.133541] env[69927]: DEBUG nova.compute.manager [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1018.133808] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1018.141652] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 5581f8af-9796-48ad-a2f3-557e90d9662a/5581f8af-9796-48ad-a2f3-557e90d9662a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1018.142483] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9205c32-6189-4bbc-b723-53df88a8cf0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.145316] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e685778-1b98-4eb5-87cd-9b89e6d13c89 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.165011] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.165376] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52330a89-6ed6-4cc6-acc2-4d0f9a72e0ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.168590] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1018.168590] env[69927]: value = "task-4096315" [ 1018.168590] env[69927]: _type = "Task" [ 1018.168590] env[69927]: } to 
complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.174066] env[69927]: DEBUG oslo_vmware.api [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1018.174066] env[69927]: value = "task-4096316" [ 1018.174066] env[69927]: _type = "Task" [ 1018.174066] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.181529] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096315, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.188334] env[69927]: DEBUG oslo_vmware.api [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.306206] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096314, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074493} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.306206] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1018.308188] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32a5c20-c455-40b9-9d9b-346d28b8aba2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.332919] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 0c8e43a3-3f33-4a41-81d3-a98565dca4a7/0c8e43a3-3f33-4a41-81d3-a98565dca4a7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1018.333058] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-571c5ab3-4870-4306-88f6-224318dcfdfa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.358830] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1018.358830] env[69927]: value = "task-4096317" [ 1018.358830] env[69927]: _type = "Task" [ 1018.358830] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.370977] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096317, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.472155] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8c704f94-0de1-44de-8caf-833790d77432 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.849s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.501695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.616s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.505204] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.362s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.505474] env[69927]: DEBUG nova.objects.instance [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lazy-loading 'resources' on Instance uuid 50eedb80-d4bc-42c4-9686-6549cbd675b7 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.685101] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096315, 'name': ReconfigVM_Task, 'duration_secs': 0.384004} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.685885] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 5581f8af-9796-48ad-a2f3-557e90d9662a/5581f8af-9796-48ad-a2f3-557e90d9662a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.686189] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance '5581f8af-9796-48ad-a2f3-557e90d9662a' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1018.698274] env[69927]: DEBUG oslo_vmware.api [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096316, 'name': PowerOffVM_Task, 'duration_secs': 0.370343} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.698893] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.699093] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1018.699366] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec240d62-f8fc-47d9-b772-4169c79dc2ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.793646] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1018.794966] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1018.794966] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleting the datastore file [datastore1] 67e00c40-35b6-4a9f-9505-19b804e78c04 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1018.794966] env[69927]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-234e1604-cb9b-4703-9d49-4fd0f9596e7f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.802446] env[69927]: DEBUG oslo_vmware.api [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1018.802446] env[69927]: value = "task-4096319" [ 1018.802446] env[69927]: _type = "Task" [ 1018.802446] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.812063] env[69927]: DEBUG oslo_vmware.api [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096319, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.870676] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096317, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.981197] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "93d19a66-f00e-4fa8-9eed-32035b020ba2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.981594] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.981851] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.982220] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.982294] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.984558] env[69927]: INFO nova.compute.manager [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Terminating instance [ 1019.020242] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b3a5123b-021b-4876-a0cf-d07418b6b052 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 66.171s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.020242] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 44.333s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.020242] env[69927]: INFO nova.compute.manager [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Unshelving [ 1019.203269] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9b731c-8528-4caf-b2ff-9a555549c95f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.238228] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945438ea-dd67-4e58-bb9a-aa31973e30b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.264996] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance '5581f8af-9796-48ad-a2f3-557e90d9662a' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1019.321746] env[69927]: DEBUG oslo_vmware.api [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096319, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31112} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.322115] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1019.322344] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1019.322536] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1019.322714] env[69927]: INFO nova.compute.manager [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1019.322973] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.324038] env[69927]: DEBUG nova.compute.manager [-] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1019.324150] env[69927]: DEBUG nova.network.neutron [-] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1019.379726] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096317, 'name': ReconfigVM_Task, 'duration_secs': 0.592286} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.379726] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 0c8e43a3-3f33-4a41-81d3-a98565dca4a7/0c8e43a3-3f33-4a41-81d3-a98565dca4a7.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1019.381986] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34f878e8-47bd-4284-9e41-53e65841b672 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.393314] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1019.393314] env[69927]: value = "task-4096320" [ 1019.393314] env[69927]: _type = "Task" [ 1019.393314] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.405287] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096320, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.488499] env[69927]: DEBUG nova.compute.manager [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1019.489035] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1019.489856] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cbf290-65da-4da8-8311-381138969620 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.505361] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.505361] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c086ec9f-f16c-400f-8d9e-deb182da19b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.518513] env[69927]: DEBUG oslo_vmware.api [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1019.518513] env[69927]: value = "task-4096321" [ 1019.518513] env[69927]: _type = "Task" [ 1019.518513] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.542217] env[69927]: DEBUG oslo_vmware.api [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096321, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.593246] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76f132f-7edd-4f52-afed-69372522300f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.603759] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89c6789-3d6e-4252-b6be-be3c5e3def85 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.651167] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d3a7fb-ab8a-459d-8638-f63d22226abf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.661363] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa1aee3-6a5c-4569-a495-6f25a0e79299 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.678467] env[69927]: DEBUG nova.compute.provider_tree [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1019.844689] env[69927]: DEBUG nova.compute.manager [req-e3ad7a81-0be5-4560-9f32-afdabefeaf2d req-cd596fba-31d3-4184-a244-5c34ca5ec2a6 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Received event network-vif-deleted-1fbc7a57-ea01-478c-8517-9b5d862bf7cc {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.845363] env[69927]: INFO nova.compute.manager [req-e3ad7a81-0be5-4560-9f32-afdabefeaf2d req-cd596fba-31d3-4184-a244-5c34ca5ec2a6 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Neutron deleted interface 1fbc7a57-ea01-478c-8517-9b5d862bf7cc; detaching it from the instance and deleting it from the info cache [ 1019.845363] env[69927]: DEBUG nova.network.neutron [req-e3ad7a81-0be5-4560-9f32-afdabefeaf2d req-cd596fba-31d3-4184-a244-5c34ca5ec2a6 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.858604] env[69927]: DEBUG nova.network.neutron [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Port fc4d69e0-0a53-4c34-8f56-6416a884b018 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1019.905801] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 
tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096320, 'name': Rename_Task, 'duration_secs': 0.266642} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.906126] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1019.906402] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b1d9903-f4e3-4c8d-86a9-e26c056d3e98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.916488] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1019.916488] env[69927]: value = "task-4096322" [ 1019.916488] env[69927]: _type = "Task" [ 1019.916488] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.929148] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096322, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.030320] env[69927]: DEBUG oslo_vmware.api [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096321, 'name': PowerOffVM_Task, 'duration_secs': 0.410686} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.030646] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.030861] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.031170] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26888209-7dc9-4147-a55a-d81c28524094 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.044842] env[69927]: DEBUG nova.compute.utils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1020.163359] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.163737] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.164221] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Deleting the datastore file [datastore1] 93d19a66-f00e-4fa8-9eed-32035b020ba2 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.164696] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a239f483-c6a2-4a05-8740-6807e5ff752f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.172906] env[69927]: DEBUG oslo_vmware.api [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for the task: (returnval){ [ 1020.172906] env[69927]: value = "task-4096324" [ 1020.172906] env[69927]: _type = "Task" [ 1020.172906] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.182303] env[69927]: DEBUG oslo_vmware.api [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096324, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.211833] env[69927]: ERROR nova.scheduler.client.report [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] [req-43d220d7-a34c-4e9c-a8ba-bde7c006509d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-43d220d7-a34c-4e9c-a8ba-bde7c006509d"}]} [ 1020.230946] env[69927]: DEBUG nova.network.neutron [-] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.239137] env[69927]: DEBUG nova.scheduler.client.report [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1020.258948] env[69927]: DEBUG nova.scheduler.client.report [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1020.259279] env[69927]: DEBUG nova.compute.provider_tree [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1020.277714] env[69927]: DEBUG nova.scheduler.client.report [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Refreshing aggregate associations for resource provider 
2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1020.306577] env[69927]: DEBUG nova.scheduler.client.report [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1020.348976] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1fd5a09d-b878-4275-8d4e-842750fd4b0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.359583] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acff6723-ea2d-431a-a482-eda1186e1c06 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.407020] env[69927]: DEBUG nova.compute.manager [req-e3ad7a81-0be5-4560-9f32-afdabefeaf2d req-cd596fba-31d3-4184-a244-5c34ca5ec2a6 service nova] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Detach interface failed, port_id=1fbc7a57-ea01-478c-8517-9b5d862bf7cc, reason: Instance 67e00c40-35b6-4a9f-9505-19b804e78c04 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1020.429585] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096322, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.550423] env[69927]: INFO nova.virt.block_device [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Booting with volume d0e7e15a-d1ef-48e6-8980-78f809252ca0 at /dev/sdb [ 1020.608655] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-485e8c13-7c75-4812-80f0-f3661bdd6f39 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.618823] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bd1e54-f577-4436-b795-992bfc6afe9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.667790] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4edabfb-9e92-40f4-9d05-6fc3eb4ff80f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.683521] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4519f4-8a20-4a5a-912c-83bdbbd0a950 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.700688] env[69927]: DEBUG oslo_vmware.api [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Task: {'id': task-4096324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294657} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.702308] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.702308] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1020.702398] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.702561] env[69927]: INFO nova.compute.manager [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 1020.703151] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1020.703562] env[69927]: DEBUG nova.compute.manager [-] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1020.705566] env[69927]: DEBUG nova.network.neutron [-] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1020.731851] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76560f77-49d6-4dc4-b320-264788f7e241 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.739859] env[69927]: INFO nova.compute.manager [-] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Took 1.41 seconds to deallocate network for instance. [ 1020.746164] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282946e0-8814-4c1a-930e-e94070ac3b31 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.766358] env[69927]: DEBUG nova.virt.block_device [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating existing volume attachment record: 8fd18ad0-83f7-45d0-98c0-f67f71ddcab1 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1020.911505] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.911505] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.911505] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.936698] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1e25d014-662b-4c8e-a468-f003298b935d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.939830] env[69927]: DEBUG oslo_vmware.api [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096322, 'name': PowerOnVM_Task, 'duration_secs': 0.976833} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.940341] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1020.940588] env[69927]: INFO nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Took 9.92 seconds to spawn the instance on the hypervisor. [ 1020.940800] env[69927]: DEBUG nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1020.942479] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a956fbb2-6c0c-4b46-bb28-8371d440293b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.950114] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5b252d-bd5d-4be3-a06e-ccaafceb926b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.991145] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1317cb7-c875-45bb-af38-209e3e473d6b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.000297] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d105c67-a523-425d-804b-e998c79071f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.016321] env[69927]: DEBUG nova.compute.provider_tree [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1021.251396] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 
tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.468992] env[69927]: INFO nova.compute.manager [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Took 53.63 seconds to build instance. [ 1021.574539] env[69927]: DEBUG nova.scheduler.client.report [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1021.576678] env[69927]: DEBUG nova.compute.provider_tree [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 112 to 113 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1021.576678] env[69927]: DEBUG nova.compute.provider_tree [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1021.883145] env[69927]: DEBUG nova.compute.manager [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received event network-vif-deleted-802f8075-d76e-4020-be35-7cbf0f84fc4a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.883394] env[69927]: INFO nova.compute.manager [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Neutron deleted interface 802f8075-d76e-4020-be35-7cbf0f84fc4a; detaching it from the instance and deleting it from the info cache [ 1021.883754] env[69927]: DEBUG nova.network.neutron [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Updating instance_info_cache with network_info: [{"id": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "address": "fa:16:3e:e6:8c:4c", 
"network": {"id": "04ed005d-e7b5-498b-ae86-d5973a49b209", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415626569", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6be96c1988054f0894a0b91881870c3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c8ce9d5-28", "ovs_interfaceid": "4c8ce9d5-2863-409f-9eb4-7e69c28015e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.973222] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f51393f-9b59-4f1e-8a81-d9ff8dc80f6d tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.239s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.989394] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.989622] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.989933] env[69927]: DEBUG nova.network.neutron [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.083871] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.579s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.086230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.906s 
{{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.088267] env[69927]: INFO nova.compute.claims [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.112380] env[69927]: INFO nova.scheduler.client.report [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Deleted allocations for instance 50eedb80-d4bc-42c4-9686-6549cbd675b7 [ 1022.112949] env[69927]: DEBUG nova.network.neutron [-] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.387509] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f84b5c3-2f7a-46d8-a81a-d918e5008200 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.400755] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643e1d55-ef3a-4545-98d3-2174ab17449f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.442110] env[69927]: DEBUG nova.compute.manager [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Detach interface failed, port_id=802f8075-d76e-4020-be35-7cbf0f84fc4a, reason: Instance 93d19a66-f00e-4fa8-9eed-32035b020ba2 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1022.442493] env[69927]: DEBUG nova.compute.manager [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Received event network-vif-deleted-4c8ce9d5-2863-409f-9eb4-7e69c28015e7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1022.442673] env[69927]: INFO nova.compute.manager [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Neutron deleted interface 4c8ce9d5-2863-409f-9eb4-7e69c28015e7; detaching it from the instance and deleting it from the info cache [ 1022.442900] env[69927]: DEBUG nova.network.neutron [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.621133] env[69927]: INFO nova.compute.manager [-] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Took 1.92 seconds to deallocate network for instance. 
[ 1022.625379] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8ece0d8d-d9bb-4517-8714-9e1c523c53e9 tempest-ServersV294TestFqdnHostnames-1367069286 tempest-ServersV294TestFqdnHostnames-1367069286-project-member] Lock "50eedb80-d4bc-42c4-9686-6549cbd675b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 53.630s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.949595] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c625514-9431-403e-8957-28931042ad3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.964301] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38fb7b7-1934-491c-954a-7120db514056 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.001617] env[69927]: DEBUG nova.compute.manager [req-781d2ad6-3c34-45fd-bdf2-5cba8651e78e req-59d1709f-3a14-45b6-bef3-ac82316cc9d9 service nova] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Detach interface failed, port_id=4c8ce9d5-2863-409f-9eb4-7e69c28015e7, reason: Instance 93d19a66-f00e-4fa8-9eed-32035b020ba2 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1023.004446] env[69927]: DEBUG nova.network.neutron [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance_info_cache with network_info: [{"id": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "address": "fa:16:3e:f8:cb:f0", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc4d69e0-0a", "ovs_interfaceid": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.131545] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.508431] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d042e57d-2165-4f9c-839b-272bea7b2c29
tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.624788] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4725cdf-365e-4f14-b331-e37b78136626 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.633306] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2092a7a9-e4d1-45bc-986c-8921257f1224 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.672396] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e20864-9da0-4dd5-8b8c-d0d1dab89bfe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.682985] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab6fe77-9418-43e9-a944-01fee552d7cb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.702268] env[69927]: DEBUG nova.compute.provider_tree [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.957286] env[69927]: DEBUG nova.compute.manager [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Received event network-changed-0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1023.957570] env[69927]: DEBUG nova.compute.manager [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Refreshing instance network info cache due to event network-changed-0083dc02-3370-427b-bd94-c2267d234d68. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1023.957855] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] Acquiring lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.958384] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] Acquired lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.958384] env[69927]: DEBUG nova.network.neutron [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Refreshing network info cache for port 0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.961676] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d338a9-4fcc-2f60-c306-06b3f5f342c9/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1023.963150] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e345b734-e80c-4c06-ad62-bcd44c28b3b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.971642] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d338a9-4fcc-2f60-c306-06b3f5f342c9/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1023.971821] env[69927]: ERROR oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d338a9-4fcc-2f60-c306-06b3f5f342c9/disk-0.vmdk due to incomplete transfer. [ 1023.972073] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-63aab2c5-2d20-465e-8076-d29ef8ba3788 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.982030] env[69927]: DEBUG oslo_vmware.rw_handles [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d338a9-4fcc-2f60-c306-06b3f5f342c9/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1023.982030] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Uploaded image 03c12814-dab7-40e7-ab90-e04f02e070f6 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1023.984609] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1023.985270] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-aad8a0cc-35ab-4215-bc31-be1d65a7da5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.992299] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1023.992299] env[69927]: value = "task-4096329" [ 1023.992299] env[69927]: _type = "Task" [ 1023.992299] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.002032] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096329, 'name': Destroy_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.031780] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a74b5cb-6d21-4262-9e27-d7a8d7bcaf99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.054088] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91df1eb-8b39-42cb-b0f1-e59a9eca15b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.062533] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance '5581f8af-9796-48ad-a2f3-557e90d9662a' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1024.209766] env[69927]: DEBUG nova.scheduler.client.report [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.507416] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096329, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.572166] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.572166] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0b81312-c4eb-4601-b965-92fe114f9881 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.582038] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1024.582038] env[69927]: value = "task-4096330" [ 1024.582038] env[69927]: _type = "Task" [ 1024.582038] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.590063] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096330, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.722029] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.722029] env[69927]: DEBUG nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1024.724261] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.684s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.725985] env[69927]: INFO nova.compute.claims [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.007824] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096329, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.043125] env[69927]: DEBUG nova.network.neutron [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updated VIF entry in instance network info cache for port 0083dc02-3370-427b-bd94-c2267d234d68. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.043535] env[69927]: DEBUG nova.network.neutron [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updating instance_info_cache with network_info: [{"id": "0083dc02-3370-427b-bd94-c2267d234d68", "address": "fa:16:3e:6f:14:cb", "network": {"id": "e48c0c04-ce1d-41db-8e56-0954dfca129f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2007911751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0b560d18954fd68f7eceeb96c37055", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0083dc02-33", "ovs_interfaceid": "0083dc02-3370-427b-bd94-c2267d234d68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.091667] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096330, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.230862] env[69927]: DEBUG nova.compute.utils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.234530] env[69927]: DEBUG nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1025.234871] env[69927]: DEBUG nova.network.neutron [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1025.278417] env[69927]: DEBUG nova.policy [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20629f26389d40199a4c5d5d2312dbae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2fb1fc4c3ae41a5b331c6be7973eb72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1025.508375] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096329, 'name': Destroy_Task, 'duration_secs': 1.131094} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.508712] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Destroyed the VM [ 1025.508997] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1025.509383] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c347bdd5-2b32-4ca5-aafb-93d529fc4fa4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.522466] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1025.522466] env[69927]: value = "task-4096331" [ 1025.522466] env[69927]: _type = "Task" [ 1025.522466] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.538615] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096331, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.546741] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5154862-59f0-4399-b9c4-7083476ad89f req-0e7851c3-c3af-46b2-ba5e-5c033f90e90f service nova] Releasing lock "refresh_cache-0c8e43a3-3f33-4a41-81d3-a98565dca4a7" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.592825] env[69927]: DEBUG oslo_vmware.api [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096330, 'name': PowerOnVM_Task, 'duration_secs': 0.514136} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.593313] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.593413] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d042e57d-2165-4f9c-839b-272bea7b2c29 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance '5581f8af-9796-48ad-a2f3-557e90d9662a' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1025.735649] env[69927]: DEBUG nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1025.917294] env[69927]: DEBUG nova.network.neutron [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Successfully created port: 1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.034401] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096331, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.281373] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e07028a-ff01-4074-9f83-77ec53dd886b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.295650] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71bd554-7157-4f74-ab0d-b2d72576caf4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.334223] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826b4d67-53dd-40d7-ae1d-6b676a677898 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.344376] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5429aa9e-5563-4570-9e5d-48e38f775f57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.365423] env[69927]: DEBUG nova.compute.provider_tree [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1026.531786] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096331, 'name': RemoveSnapshot_Task, 'duration_secs': 0.914801} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.532767] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1026.535882] env[69927]: DEBUG nova.compute.manager [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1026.535882] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c40ccb-6f08-42f8-ad77-ab5bd673ebe6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.752029] env[69927]: DEBUG nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1026.780285] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='8d35c0ddbff0de98a79f30f3ae50f6e1',container_format='bare',created_at=2025-05-13T19:41:49Z,direct_url=<?>,disk_format='vmdk',id=c87f5a71-3793-40fd-ad59-7b537237d25e,min_disk=1,min_ram=0,name='tempest-test-snap-984187675',owner='c2fb1fc4c3ae41a5b331c6be7973eb72',properties=ImageMetaProps,protected=<?>,size=21334016,status='active',tags=<?>,updated_at=2025-05-13T19:42:06Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1026.780804] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.781095] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1026.781399] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.781669] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image
pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1026.781923] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1026.782286] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1026.782564] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1026.783610] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1026.783929] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1026.784254] env[69927]: DEBUG nova.virt.hardware [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1026.785330] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321be6f7-b49a-4c52-a78b-fb7db283c089 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.798845] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfb26dd-73f4-4a5f-8644-334979885b98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.906383] env[69927]: DEBUG nova.scheduler.client.report [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 113 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1026.906805] env[69927]: DEBUG 
nova.compute.provider_tree [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 113 to 114 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1026.907111] env[69927]: DEBUG nova.compute.provider_tree [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1026.924965] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.051682] env[69927]: INFO nova.compute.manager [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Shelve offloading [ 1027.413765] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.413941] env[69927]: DEBUG nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.416952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.647s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.419174] env[69927]: INFO nova.compute.claims [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.555876] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.556422] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd20231b-8664-4f17-afb6-58069934df84 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.565047] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1027.565047] env[69927]: value = "task-4096332" [ 1027.565047] env[69927]: _type = "Task" [ 1027.565047] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.578429] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1027.578751] env[69927]: DEBUG nova.compute.manager [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1027.579662] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7586d5-2f8c-4001-8acc-65b7b056fb38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.587038] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.587824] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.588165] env[69927]: DEBUG nova.network.neutron [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1027.661743] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "c6f166c7-538f-4c8a-9500-48319c694ea0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.662805] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.671287] env[69927]: DEBUG nova.compute.manager [req-eeff7ddf-7c2e-4549-b619-ae119d920ea8 req-d12a9822-0ee9-4654-830d-df898f4f0f44 service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Received event network-vif-plugged-1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}}
[ 1027.671287] env[69927]: DEBUG oslo_concurrency.lockutils [req-eeff7ddf-7c2e-4549-b619-ae119d920ea8 req-d12a9822-0ee9-4654-830d-df898f4f0f44 service nova] Acquiring lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.671287] env[69927]: DEBUG oslo_concurrency.lockutils [req-eeff7ddf-7c2e-4549-b619-ae119d920ea8 req-d12a9822-0ee9-4654-830d-df898f4f0f44 service nova] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.671287] env[69927]: DEBUG oslo_concurrency.lockutils [req-eeff7ddf-7c2e-4549-b619-ae119d920ea8 req-d12a9822-0ee9-4654-830d-df898f4f0f44 service nova] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.671480] env[69927]: DEBUG nova.compute.manager [req-eeff7ddf-7c2e-4549-b619-ae119d920ea8 req-d12a9822-0ee9-4654-830d-df898f4f0f44 service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] No waiting events found dispatching network-vif-plugged-1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1027.671581] env[69927]: WARNING nova.compute.manager [req-eeff7ddf-7c2e-4549-b619-ae119d920ea8 req-d12a9822-0ee9-4654-830d-df898f4f0f44 service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Received unexpected event network-vif-plugged-1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 for instance with vm_state building and task_state spawning.
[ 1027.777622] env[69927]: DEBUG nova.network.neutron [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Successfully updated port: 1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.912606] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "5581f8af-9796-48ad-a2f3-557e90d9662a" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.912606] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.912606] env[69927]: DEBUG nova.compute.manager [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Going to confirm migration 3 {{(pid=69927) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1027.924926] env[69927]: DEBUG nova.compute.utils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1027.926729] env[69927]: DEBUG nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Allocating IP information in the background.
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1027.927132] env[69927]: DEBUG nova.network.neutron [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.008493] env[69927]: DEBUG nova.policy [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '482f17df9f874808b99defabed52bc8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fa2f3f0abc7474196dfbee4f8c09d3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1028.167333] env[69927]: DEBUG nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1028.280512] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "refresh_cache-b750ce2c-ee85-46c6-bf12-edb3f088e6de" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.280694] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "refresh_cache-b750ce2c-ee85-46c6-bf12-edb3f088e6de" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.280870] env[69927]: DEBUG nova.network.neutron [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.348090] env[69927]: DEBUG nova.network.neutron [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Updating instance_info_cache with network_info: [{"id": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "address": "fa:16:3e:c4:9b:91", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89f92ae9-a8", "ovs_interfaceid": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.427538] env[69927]: DEBUG nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.504538] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.504760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.504958] env[69927]: DEBUG nova.network.neutron [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.505167] env[69927]: DEBUG nova.objects.instance [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lazy-loading 'info_cache' on Instance uuid 5581f8af-9796-48ad-a2f3-557e90d9662a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.690972] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.739991] env[69927]: DEBUG nova.network.neutron [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Successfully created port: 8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.821724] env[69927]: DEBUG nova.network.neutron [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 
tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1028.853634] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.950780] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042dc840-1f62-4f4f-b0c7-8d24d565a69e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.959238] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eef1244-ee46-408e-a372-75215b62812d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.997661] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bad9dd9-0459-41eb-b8ed-a1b0ba001c22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.007087] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d15e48-391e-4901-ae07-45d055a1faed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.024675] env[69927]: DEBUG nova.compute.provider_tree [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.062280] env[69927]: DEBUG nova.network.neutron [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Updating instance_info_cache with network_info: [{"id": "1d1a9d58-33a0-4ef9-b472-1e66ed5b0450", "address": "fa:16:3e:42:bb:18", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1a9d58-33", "ovs_interfaceid": "1d1a9d58-33a0-4ef9-b472-1e66ed5b0450", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.247652] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.248651] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8de70a-38b0-4743-984a-f9d286d91c34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.256611] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.258026] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0111958-faa2-4d58-9af1-1d1a78bb85d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.260908] env[69927]: DEBUG nova.compute.manager [req-bff87ff1-f594-44f6-9f16-d25596cf9a9d req-3ede5029-5a6f-423a-949d-1530e2359146 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Received event network-vif-unplugged-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1029.261066] env[69927]: DEBUG oslo_concurrency.lockutils [req-bff87ff1-f594-44f6-9f16-d25596cf9a9d req-3ede5029-5a6f-423a-949d-1530e2359146 service nova] Acquiring lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.261276] env[69927]: DEBUG oslo_concurrency.lockutils [req-bff87ff1-f594-44f6-9f16-d25596cf9a9d req-3ede5029-5a6f-423a-949d-1530e2359146 service nova] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.261441] env[69927]: DEBUG oslo_concurrency.lockutils [req-bff87ff1-f594-44f6-9f16-d25596cf9a9d req-3ede5029-5a6f-423a-949d-1530e2359146 service nova] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.261605] env[69927]: DEBUG nova.compute.manager [req-bff87ff1-f594-44f6-9f16-d25596cf9a9d req-3ede5029-5a6f-423a-949d-1530e2359146 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] No waiting events found dispatching network-vif-unplugged-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1029.261767] env[69927]: WARNING nova.compute.manager [req-bff87ff1-f594-44f6-9f16-d25596cf9a9d req-3ede5029-5a6f-423a-949d-1530e2359146 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Received unexpected event 
network-vif-unplugged-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 for instance with vm_state shelved and task_state shelving_offloading. [ 1029.337037] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.337526] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.337926] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleting the datastore file [datastore2] 9c00e485-fd59-4571-abd5-80ca5e3bac1b {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.338327] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43d9c2a5-5d94-4f75-8116-368a5e597316 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.345929] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1029.345929] env[69927]: value = "task-4096334" [ 1029.345929] env[69927]: _type = "Task" [ 1029.345929] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.355182] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096334, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.441757] env[69927]: DEBUG nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.469804] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:42:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1541961574',id=36,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-760505190',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.470063] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.470227] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.470608] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.470608] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.470682] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.470881] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.471054] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 
tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1029.471223] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.471385] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.471556] env[69927]: DEBUG nova.virt.hardware [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.472479] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd804da4-604e-4fc0-a0e9-5bb6e832d348 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.481641] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81691d2-e505-4249-8b0d-c90b232d6373 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.527331] env[69927]: DEBUG nova.scheduler.client.report [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1029.565196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "refresh_cache-b750ce2c-ee85-46c6-bf12-edb3f088e6de" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.565527] env[69927]: DEBUG nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Instance network_info: |[{"id": "1d1a9d58-33a0-4ef9-b472-1e66ed5b0450", "address": "fa:16:3e:42:bb:18", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1a9d58-33", "ovs_interfaceid": "1d1a9d58-33a0-4ef9-b472-1e66ed5b0450", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1029.566342] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:bb:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d1a9d58-33a0-4ef9-b472-1e66ed5b0450', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.574018] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.574018] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.574018] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36bf9a38-5556-419a-8503-84ee496de01e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.594729] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.594729] env[69927]: value = "task-4096335" [ 1029.594729] env[69927]: _type = "Task" [ 1029.594729] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.606191] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096335, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.699784] env[69927]: DEBUG nova.compute.manager [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Received event network-changed-1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1029.700159] env[69927]: DEBUG nova.compute.manager [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Refreshing instance network info cache due to event network-changed-1d1a9d58-33a0-4ef9-b472-1e66ed5b0450. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1029.700220] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] Acquiring lock "refresh_cache-b750ce2c-ee85-46c6-bf12-edb3f088e6de" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.700362] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] Acquired lock "refresh_cache-b750ce2c-ee85-46c6-bf12-edb3f088e6de" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.700525] env[69927]: DEBUG nova.network.neutron [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Refreshing network info cache for port 1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1029.740426] env[69927]: DEBUG nova.network.neutron [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance_info_cache with network_info: [{"id": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "address": "fa:16:3e:f8:cb:f0", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc4d69e0-0a", "ovs_interfaceid": "fc4d69e0-0a53-4c34-8f56-6416a884b018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.860588] env[69927]: DEBUG oslo_vmware.api [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e 
tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096334, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429419} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.860994] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.861162] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.861409] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.892865] env[69927]: INFO nova.scheduler.client.report [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted allocations for instance 9c00e485-fd59-4571-abd5-80ca5e3bac1b [ 1030.032641] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.033319] env[69927]: DEBUG nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1030.035951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.021s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.036221] env[69927]: DEBUG nova.objects.instance [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lazy-loading 'resources' on Instance uuid e0bca101-cf8d-48e1-a331-b0018548593e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.108411] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096335, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.248140] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-5581f8af-9796-48ad-a2f3-557e90d9662a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.248534] env[69927]: DEBUG nova.objects.instance [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lazy-loading 'migration_context' on Instance uuid 5581f8af-9796-48ad-a2f3-557e90d9662a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.400026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.469137] env[69927]: DEBUG nova.network.neutron [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Updated VIF entry in instance network info cache for port 1d1a9d58-33a0-4ef9-b472-1e66ed5b0450. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1030.469137] env[69927]: DEBUG nova.network.neutron [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Updating instance_info_cache with network_info: [{"id": "1d1a9d58-33a0-4ef9-b472-1e66ed5b0450", "address": "fa:16:3e:42:bb:18", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d1a9d58-33", "ovs_interfaceid": "1d1a9d58-33a0-4ef9-b472-1e66ed5b0450", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.541035] env[69927]: DEBUG nova.compute.utils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1030.545301] env[69927]: DEBUG nova.compute.manager [None 
req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1030.545964] env[69927]: DEBUG nova.network.neutron [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1030.586964] env[69927]: DEBUG nova.policy [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51ec047c72c8450abe8f553c52a847f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef85ff9fc3d240a8a24b6cea8dda0f6f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1030.611829] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096335, 'name': CreateVM_Task, 'duration_secs': 0.692463} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.612242] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1030.613519] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.613821] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.614754] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1030.617433] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-311ee0c1-854c-451f-870f-45adcad991d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.623030] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: 
(returnval){ [ 1030.623030] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5271f8bd-7757-c46a-7037-713d902c3ab9" [ 1030.623030] env[69927]: _type = "Task" [ 1030.623030] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.633675] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5271f8bd-7757-c46a-7037-713d902c3ab9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.753706] env[69927]: DEBUG nova.objects.base [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Object Instance<5581f8af-9796-48ad-a2f3-557e90d9662a> lazy-loaded attributes: info_cache,migration_context {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1030.754896] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bfd78d-379e-4aba-a0d1-719d792c035f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.781942] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fa19964-bb33-4556-a32b-6453643639cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.793892] env[69927]: DEBUG oslo_vmware.api [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1030.793892] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b8afb3-bb27-cb46-3b37-263473185d86" [ 1030.793892] env[69927]: _type = "Task" [ 1030.793892] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.806494] env[69927]: DEBUG oslo_vmware.api [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b8afb3-bb27-cb46-3b37-263473185d86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.867159] env[69927]: DEBUG nova.network.neutron [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Successfully updated port: 8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.886885] env[69927]: DEBUG nova.network.neutron [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Successfully created port: eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1030.971871] env[69927]: DEBUG oslo_concurrency.lockutils [req-9cf848aa-f034-46c4-af75-c19f41009d39 req-6cc7b7ff-edca-4250-95fa-a148a92b36db service nova] Releasing lock "refresh_cache-b750ce2c-ee85-46c6-bf12-edb3f088e6de" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.991120] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6510a9b4-e9be-450d-8b80-9f2c32474a06 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.999643] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37459e81-296c-4ba1-8968-626ef972bd15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.039050] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c640af99-b4ad-4974-b72f-edf2cb446682 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.046422] env[69927]: DEBUG nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1031.050273] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b283e6-c38a-4daa-9e09-f2f7c893debe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.067184] env[69927]: DEBUG nova.compute.provider_tree [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.134535] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.135027] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Processing image c87f5a71-3793-40fd-ad59-7b537237d25e {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1031.135446] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e/c87f5a71-3793-40fd-ad59-7b537237d25e.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.135649] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e/c87f5a71-3793-40fd-ad59-7b537237d25e.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.136422] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1031.136699] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c7b19cb-d66b-4cb1-a2a8-c791fef8bb4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.150312] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1031.150528] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1031.151327] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a9c5e14-a9d9-47e9-b163-6b1f54bb93ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.157699] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1031.157699] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522c6cea-cdba-ff41-875e-e775dc2b0a36" [ 1031.157699] env[69927]: _type = "Task" [ 1031.157699] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.167415] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522c6cea-cdba-ff41-875e-e775dc2b0a36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.292828] env[69927]: DEBUG nova.compute.manager [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Received event network-changed-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.293465] env[69927]: DEBUG nova.compute.manager [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Refreshing instance network info cache due to event network-changed-89f92ae9-a8f4-402a-b248-cc3ad9bf67d7. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1031.293465] env[69927]: DEBUG oslo_concurrency.lockutils [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] Acquiring lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.293715] env[69927]: DEBUG oslo_concurrency.lockutils [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] Acquired lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.293804] env[69927]: DEBUG nova.network.neutron [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Refreshing network info cache for port 89f92ae9-a8f4-402a-b248-cc3ad9bf67d7 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.307025] env[69927]: DEBUG oslo_vmware.api [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b8afb3-bb27-cb46-3b37-263473185d86, 'name': SearchDatastore_Task, 'duration_secs': 0.017323} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.307025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.372337] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.372522] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.372958] env[69927]: DEBUG nova.network.neutron [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.570958] env[69927]: DEBUG nova.scheduler.client.report [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1031.669663] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Preparing fetch location {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1031.669952] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Fetch image to [datastore1] OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726/OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726.vmdk {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1031.672028] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Downloading stream optimized 
image c87f5a71-3793-40fd-ad59-7b537237d25e to [datastore1] OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726/OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726.vmdk on the data store datastore1 as vApp {{(pid=69927) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1031.672028] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Downloading image file data c87f5a71-3793-40fd-ad59-7b537237d25e to the ESX as VM named 'OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726' {{(pid=69927) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1031.751038] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1031.751038] env[69927]: value = "resgroup-9" [ 1031.751038] env[69927]: _type = "ResourcePool" [ 1031.751038] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1031.751038] env[69927]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-505eca32-6be9-4130-b8ba-18719027eebf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.775122] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease: (returnval){ [ 1031.775122] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52819b7c-757e-2c51-8bcd-a145eae89e2b" [ 1031.775122] env[69927]: _type = "HttpNfcLease" [ 1031.775122] env[69927]: } obtained for vApp import into resource pool (val){ [ 1031.775122] env[69927]: value = "resgroup-9" [ 1031.775122] env[69927]: _type = "ResourcePool" [ 1031.775122] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1031.775122] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the lease: (returnval){ [ 1031.775122] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52819b7c-757e-2c51-8bcd-a145eae89e2b" [ 1031.775122] env[69927]: _type = "HttpNfcLease" [ 1031.775122] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1031.783015] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1031.783015] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52819b7c-757e-2c51-8bcd-a145eae89e2b" [ 1031.783015] env[69927]: _type = "HttpNfcLease" [ 1031.783015] env[69927]: } is initializing. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1031.872935] env[69927]: DEBUG nova.compute.manager [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Received event network-vif-plugged-8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.872935] env[69927]: DEBUG oslo_concurrency.lockutils [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] Acquiring lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.872935] env[69927]: DEBUG oslo_concurrency.lockutils [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.872935] env[69927]: DEBUG oslo_concurrency.lockutils [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.872935] env[69927]: DEBUG nova.compute.manager [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] No waiting events found dispatching network-vif-plugged-8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1031.872935] env[69927]: WARNING nova.compute.manager [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Received unexpected event network-vif-plugged-8c5792b5-80a0-4414-bb3b-ae6e25874202 for instance with vm_state building and task_state spawning. [ 1031.872935] env[69927]: DEBUG nova.compute.manager [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Received event network-changed-8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.872935] env[69927]: DEBUG nova.compute.manager [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Refreshing instance network info cache due to event network-changed-8c5792b5-80a0-4414-bb3b-ae6e25874202. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1031.873543] env[69927]: DEBUG oslo_concurrency.lockutils [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] Acquiring lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.918658] env[69927]: DEBUG nova.network.neutron [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.983891] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.062850] env[69927]: DEBUG nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1032.071326] env[69927]: DEBUG nova.network.neutron [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Updating instance_info_cache with network_info: [{"id": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "address": "fa:16:3e:26:f8:b0", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5792b5-80", "ovs_interfaceid": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.073031] env[69927]: DEBUG nova.network.neutron [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Updated VIF entry in instance network info cache for port 89f92ae9-a8f4-402a-b248-cc3ad9bf67d7. 
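The instance_info_cache payloads logged above are plain JSON-like structures; as a minimal illustration (not part of the log and not Nova code), the port UUID, MAC and fixed IPs of a VIF can be pulled out of such an entry with ordinary Python. The dict below is abbreviated from the entry logged for port 8c5792b5-80a0-4414-bb3b-ae6e25874202 on instance ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2.

# Abbreviated from the network_info logged above; only a few fields are kept.
vif = {
    "id": "8c5792b5-80a0-4414-bb3b-ae6e25874202",
    "address": "fa:16:3e:26:f8:b0",
    "type": "ovs",
    "devname": "tap8c5792b5-80",
    "network": {
        "id": "59c5a413-ed97-4651-a37f-e9ed6e46972e",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4}],
        }],
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["id"], vif["address"], fixed_ips)
# 8c5792b5-80a0-4414-bb3b-ae6e25874202 fa:16:3e:26:f8:b0 ['192.168.128.11']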
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.073339] env[69927]: DEBUG nova.network.neutron [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Updating instance_info_cache with network_info: [{"id": "89f92ae9-a8f4-402a-b248-cc3ad9bf67d7", "address": "fa:16:3e:c4:9b:91", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": null, "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap89f92ae9-a8", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.079269] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.083349] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1032.083587] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.083740] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1032.083920] env[69927]: DEBUG nova.virt.hardware 
[None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.084086] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1032.084281] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1032.084500] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1032.084661] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1032.084836] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1032.085038] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1032.085218] env[69927]: DEBUG nova.virt.hardware [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1032.085551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.541s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.085895] env[69927]: DEBUG nova.objects.instance [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lazy-loading 'resources' on Instance uuid 74ea584f-b20f-425b-acb3-0ec60e7f2a1e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.088565] env[69927]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ec0c1f-368c-4bd6-92d7-d10381587c40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.102214] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4299f75c-03ef-457e-9a06-bdfbbe8890f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.107346] env[69927]: INFO nova.scheduler.client.report [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Deleted allocations for instance e0bca101-cf8d-48e1-a331-b0018548593e [ 1032.282677] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1032.282677] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52819b7c-757e-2c51-8bcd-a145eae89e2b" [ 1032.282677] env[69927]: _type = "HttpNfcLease" [ 1032.282677] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1032.531285] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfd0f7e-58fe-41f4-8286-4ae56b210366 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.539272] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97aef1ad-229d-4558-a372-d3d9973ea19f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.571505] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28c61d4-64ca-4367-ab1c-6e38f1afabd1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.575874] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Releasing lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.576197] env[69927]: DEBUG nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Instance network_info: |[{"id": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "address": "fa:16:3e:26:f8:b0", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": 
"nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5792b5-80", "ovs_interfaceid": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.576604] env[69927]: DEBUG oslo_concurrency.lockutils [req-5aff3f10-8208-455f-9644-7777096ca203 req-7135d966-60a4-489f-9e72-6a64f3001647 service nova] Releasing lock "refresh_cache-9c00e485-fd59-4571-abd5-80ca5e3bac1b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.578905] env[69927]: DEBUG oslo_concurrency.lockutils [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] Acquired lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.579299] env[69927]: DEBUG nova.network.neutron [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Refreshing network info cache for port 8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.580467] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:f8:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4a9e02-45f1-4afb-8abb-0de26b153086', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c5792b5-80a0-4414-bb3b-ae6e25874202', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.588428] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Creating folder: Project (5fa2f3f0abc7474196dfbee4f8c09d3f). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1032.588933] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a459f75-b14d-49a8-927f-9e1e736ea0c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.591547] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b635ff07-aaf8-4528-b1ed-f936e3792c9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.608551] env[69927]: DEBUG nova.compute.provider_tree [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.611347] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Created folder: Project (5fa2f3f0abc7474196dfbee4f8c09d3f) in parent group-v811283. [ 1032.611547] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Creating folder: Instances. Parent ref: group-v811508. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1032.611789] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-062a22d9-f138-4121-b8cb-cb63050e874c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.624939] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Created folder: Instances in parent group-v811508. [ 1032.625327] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
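The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above comes from oslo.service's looping-call machinery. The sketch below shows only the generic FixedIntervalLoopingCall pattern that machinery exposes, with made-up example values; it is not a reconstruction of vm_util.create_vm.

from oslo_service import loopingcall

attempts = {"count": 0}

def _poll_create_vm():
    # Stand-in for checking whether the wrapped call has finished; in the log
    # the wrapped function is nova.virt.vmwareapi.vm_util.create_vm.
    attempts["count"] += 1
    if attempts["count"] >= 3:
        raise loopingcall.LoopingCallDone(retvalue="vm-ref")  # stops the loop

timer = loopingcall.FixedIntervalLoopingCall(_poll_create_vm)
result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone is raised
print(result)  # "vm-ref"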
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.625793] env[69927]: DEBUG oslo_concurrency.lockutils [None req-69b1c369-ac99-45da-b319-7fb2ccdd8df1 tempest-SecurityGroupsTestJSON-258323803 tempest-SecurityGroupsTestJSON-258323803-project-member] Lock "e0bca101-cf8d-48e1-a331-b0018548593e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.117s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.626764] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.627377] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b09a73d-ea29-47d0-9b7e-814a3f7fd48d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.651134] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.651134] env[69927]: value = "task-4096339" [ 1032.651134] env[69927]: _type = "Task" [ 1032.651134] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.659587] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096339, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.666554] env[69927]: DEBUG nova.network.neutron [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Successfully updated port: eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.783059] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1032.783059] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52819b7c-757e-2c51-8bcd-a145eae89e2b" [ 1032.783059] env[69927]: _type = "HttpNfcLease" [ 1032.783059] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1032.783409] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1032.783409] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52819b7c-757e-2c51-8bcd-a145eae89e2b" [ 1032.783409] env[69927]: _type = "HttpNfcLease" [ 1032.783409] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1032.784274] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68574efc-b4a4-47a8-80df-bec78ff3ae2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.791679] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fc83aa-1144-8d60-c60e-d7d660af33b4/disk-0.vmdk from lease info. 
{{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1032.791872] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fc83aa-1144-8d60-c60e-d7d660af33b4/disk-0.vmdk. {{(pid=69927) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1032.855669] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a5d09547-2c38-4152-8beb-3ced8d18540f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.118020] env[69927]: DEBUG nova.scheduler.client.report [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.166743] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096339, 'name': CreateVM_Task, 'duration_secs': 0.41927} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.169697] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.170714] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.171174] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.171494] env[69927]: DEBUG nova.network.neutron [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.176603] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" 
{{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.176985] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.177477] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.181781] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81fe9078-f179-4dd0-9159-cfcac6e14fd9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.191257] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1033.191257] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528d7a33-72ba-5592-f5f4-000fc5f15dd0" [ 1033.191257] env[69927]: _type = "Task" [ 1033.191257] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.208442] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528d7a33-72ba-5592-f5f4-000fc5f15dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.012113} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.211711] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.212050] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.212635] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.212635] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.212797] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.213251] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95aaec51-1d2d-437c-9069-21b807d8d2c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.228172] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.228172] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Folder [datastore1] devstack-image-cache_base created. 
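The Acquiring/Acquired/Releasing lock lines above are emitted by oslo.concurrency's lockutils. A minimal sketch of the two forms that roughly correspond to the entries here (lock names and function bodies are illustrative, not Nova's actual code):

from oslo_concurrency import lockutils

def fetch_image_if_missing():
    # Context-manager form: produces the plain Acquiring/Acquired/Releasing lines.
    # The lock name is illustrative; the log uses the datastore image-cache path.
    with lockutils.lock("[datastore1] devstack-image-cache_base/<image-id>"):
        pass  # check the cache folder and copy the VMDK only if it is missing

# Decorator form: produces the acquired-by/released-by lines (e.g. "compute_resources").
@lockutils.synchronized("compute_resources")
def update_usage():
    pass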
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.228172] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6183c97-11e5-47da-b275-421bdb63ba2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.238562] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1033.238562] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2c70a-ca20-da90-2fb8-d2bd3579831e" [ 1033.238562] env[69927]: _type = "Task" [ 1033.238562] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.256056] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2c70a-ca20-da90-2fb8-d2bd3579831e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.386474] env[69927]: DEBUG nova.network.neutron [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Updated VIF entry in instance network info cache for port 8c5792b5-80a0-4414-bb3b-ae6e25874202. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.386838] env[69927]: DEBUG nova.network.neutron [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Updating instance_info_cache with network_info: [{"id": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "address": "fa:16:3e:26:f8:b0", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5792b5-80", "ovs_interfaceid": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.412734] env[69927]: DEBUG nova.compute.manager [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received event 
network-vif-plugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.412961] env[69927]: DEBUG oslo_concurrency.lockutils [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.413450] env[69927]: DEBUG oslo_concurrency.lockutils [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.413699] env[69927]: DEBUG oslo_concurrency.lockutils [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.413990] env[69927]: DEBUG nova.compute.manager [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] No waiting events found dispatching network-vif-plugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1033.414327] env[69927]: WARNING nova.compute.manager [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received unexpected event network-vif-plugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd for instance with vm_state building and task_state spawning. [ 1033.414593] env[69927]: DEBUG nova.compute.manager [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received event network-changed-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.414887] env[69927]: DEBUG nova.compute.manager [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Refreshing instance network info cache due to event network-changed-eb2105ba-0276-4bc6-a2af-933090d4cdcd. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1033.415128] env[69927]: DEBUG oslo_concurrency.lockutils [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] Acquiring lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.623975] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.538s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.627788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.070s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.631019] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.631019] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.457s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.631019] env[69927]: DEBUG nova.objects.instance [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lazy-loading 'resources' on Instance uuid 01c8eb3b-bf30-4b00-af71-e32f0dc19171 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.656908] env[69927]: INFO nova.scheduler.client.report [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Deleted allocations for instance c3e8a429-8484-4b11-abe3-1cccf0992556 [ 1033.663710] env[69927]: INFO nova.scheduler.client.report [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Deleted allocations for instance 74ea584f-b20f-425b-acb3-0ec60e7f2a1e [ 1033.669246] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Completed reading data from the image iterator. 
{{(pid=69927) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1033.669493] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fc83aa-1144-8d60-c60e-d7d660af33b4/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1033.670527] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84ae2be-d11a-475c-b214-a99fc3ee15b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.684826] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fc83aa-1144-8d60-c60e-d7d660af33b4/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1033.684826] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fc83aa-1144-8d60-c60e-d7d660af33b4/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1033.684826] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-140488bb-4a69-463d-b4ac-c66113097307 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.723773] env[69927]: DEBUG nova.network.neutron [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.755939] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b2c70a-ca20-da90-2fb8-d2bd3579831e, 'name': SearchDatastore_Task, 'duration_secs': 0.015771} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.756875] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21835d3a-d1e4-4a71-a892-2fba1bee62dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.768364] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1033.768364] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a8b29-e88b-2f91-3ee3-b20b6c6db43a" [ 1033.768364] env[69927]: _type = "Task" [ 1033.768364] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.778140] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a8b29-e88b-2f91-3ee3-b20b6c6db43a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.890465] env[69927]: DEBUG oslo_concurrency.lockutils [req-08daa6c2-1940-491f-bfd5-f41b921eb47d req-87bc9379-b7a9-4588-9cbc-899edb905801 service nova] Releasing lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.967328] env[69927]: DEBUG nova.network.neutron [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2105ba-02", "ovs_interfaceid": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.172710] env[69927]: DEBUG oslo_concurrency.lockutils [None req-46d05cda-10b2-427b-adad-58d42874e01f tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "c3e8a429-8484-4b11-abe3-1cccf0992556" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.985s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.181804] env[69927]: DEBUG oslo_vmware.rw_handles [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fc83aa-1144-8d60-c60e-d7d660af33b4/disk-0.vmdk. 
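The sequence running from "Found VMDK URL ... from lease info" through "Closed VMDK write handle" above is oslo.vmware's lease-based, stream-optimized image upload. Below is a heavily simplified sketch, assuming `session` is an oslo_vmware.api.VMwareAPISession, `lease` the HttpNfcLease obtained from ResourcePool.ImportVApp, and `image_iter` the image data iterator; a single requests.put stands in for the chunked write handle the log shows.

import requests

def upload_stream_optimized(session, lease, vmdk_url, image_iter):
    # Poll until the HttpNfcLease leaves "initializing" and reports "ready".
    session.wait_for_lease_ready(lease)
    # One streaming PUT stands in for the chunked VMDK write handle in the log.
    requests.put(vmdk_url, data=image_iter, verify=False)
    # Release the lease, as in "Invoking HttpNfcLease.HttpNfcLeaseComplete".
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)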
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1034.183876] env[69927]: INFO nova.virt.vmwareapi.images [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Downloaded image file data c87f5a71-3793-40fd-ad59-7b537237d25e [ 1034.189272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e029f6e-c0bd-42ea-97bc-e0e6d418b90a tempest-AttachInterfacesV270Test-499421490 tempest-AttachInterfacesV270Test-499421490-project-member] Lock "74ea584f-b20f-425b-acb3-0ec60e7f2a1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.597s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.190966] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c48b0ca-ec85-4160-af25-894152345349 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.216956] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4948770-22a3-4597-8109-f64bf9dbb020 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.245062] env[69927]: INFO nova.virt.vmwareapi.images [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] The imported VM was unregistered [ 1034.248051] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Caching image {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1034.248167] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating directory with path [datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.248793] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60bd11dc-ae6a-4c41-beb0-cb39fa216a78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.265460] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created directory with path [datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.265706] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726/OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726.vmdk to [datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e/c87f5a71-3793-40fd-ad59-7b537237d25e.vmdk. 
{{(pid=69927) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1034.266015] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c7bde61d-5009-4503-9e0e-d15d99f502bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.281190] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1034.281190] env[69927]: value = "task-4096341" [ 1034.281190] env[69927]: _type = "Task" [ 1034.281190] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.285889] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a8b29-e88b-2f91-3ee3-b20b6c6db43a, 'name': SearchDatastore_Task, 'duration_secs': 0.024891} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.289560] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.289903] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2/ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.290258] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-107ea830-9e1b-47d2-ba0c-1f85fc4e1a83 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.298861] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096341, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.303408] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1034.303408] env[69927]: value = "task-4096342" [ 1034.303408] env[69927]: _type = "Task" [ 1034.303408] env[69927]: } to complete. 
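The MoveVirtualDisk_Task / CopyVirtualDisk_Task entries above follow the usual invoke-then-wait pattern. A minimal sketch assuming `session` is an oslo_vmware.api.VMwareAPISession (datacenter arguments omitted for brevity):

def copy_cached_image(session, source_vmdk, dest_vmdk):
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=source_vmdk, destName=dest_vmdk)
    # wait_for_task polls the task (the "progress is N%." lines) and raises on error.
    return session.wait_for_task(task)

# e.g. copy_cached_image(session,
#          '[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk',
#          '[datastore1] ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2/ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2.vmdk')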
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.314270] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.469714] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.470074] env[69927]: DEBUG nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Instance network_info: |[{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2105ba-02", "ovs_interfaceid": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1034.470394] env[69927]: DEBUG oslo_concurrency.lockutils [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] Acquired lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.470573] env[69927]: DEBUG nova.network.neutron [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Refreshing network info cache for port eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.471821] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:0c:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb2105ba-0276-4bc6-a2af-933090d4cdcd', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.483595] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating folder: Project (ef85ff9fc3d240a8a24b6cea8dda0f6f). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.490255] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e27c283-e1ac-45d8-ae10-c24bd0e99ff8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.508927] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Created folder: Project (ef85ff9fc3d240a8a24b6cea8dda0f6f) in parent group-v811283. [ 1034.508927] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating folder: Instances. Parent ref: group-v811511. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.511795] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47454ab7-2742-4c0c-8dc1-460c762c7a34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.532119] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Created folder: Instances in parent group-v811511. [ 1034.532119] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.532354] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.532556] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6523543a-327a-4720-8d14-5fd0630e3e2f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.562509] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.562509] env[69927]: value = "task-4096345" [ 1034.562509] env[69927]: _type = "Task" [ 1034.562509] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.576600] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096345, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.682778] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c20843-2248-46c9-852f-a9051d68bac3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.697546] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cd156c-03e2-4004-a651-3cddc5c87681 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.745830] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2903d8e-f16d-4e8c-a136-62074fd60d55 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.760927] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e437b2-b871-4a16-a8e6-693f79faf193 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.784243] env[69927]: DEBUG nova.compute.provider_tree [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.802568] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096341, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.821789] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096342, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.958671] env[69927]: DEBUG nova.network.neutron [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updated VIF entry in instance network info cache for port eb2105ba-0276-4bc6-a2af-933090d4cdcd. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.959343] env[69927]: DEBUG nova.network.neutron [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2105ba-02", "ovs_interfaceid": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.078939] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096345, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.291387] env[69927]: DEBUG nova.scheduler.client.report [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.311202] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096341, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.321905] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096342, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.465269] env[69927]: DEBUG oslo_concurrency.lockutils [req-59f6d65e-790f-4e83-8832-dfc6f6de7c0e req-4df87aba-f0d8-4609-acc8-a913a58418c0 service nova] Releasing lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.542310] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.542700] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.542937] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.543147] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.543318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.545583] env[69927]: INFO nova.compute.manager [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Terminating instance [ 1035.583774] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096345, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.801105] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.803359] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096341, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.808887] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.439s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.808887] env[69927]: DEBUG nova.objects.instance [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1035.823620] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096342, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.273015} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.823930] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2/ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.824200] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.824470] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b6e46e5-18fd-41ea-bf0b-c0469a26c47b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.834542] env[69927]: INFO nova.scheduler.client.report [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Deleted allocations for instance 01c8eb3b-bf30-4b00-af71-e32f0dc19171 [ 1035.836970] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1035.836970] env[69927]: value = "task-4096346" [ 1035.836970] env[69927]: _type = "Task" [ 1035.836970] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.854065] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096346, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.051199] env[69927]: DEBUG nova.compute.manager [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.051199] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.051665] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f9ce4b-91d5-425f-9d49-53128a8be0e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.064292] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.064593] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57a212af-fc38-4a69-8f41-4fe6c868d868 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.075887] env[69927]: DEBUG oslo_vmware.api [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 1036.075887] env[69927]: value = "task-4096347" [ 1036.075887] env[69927]: _type = "Task" [ 1036.075887] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.080710] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096345, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.097082] env[69927]: DEBUG oslo_vmware.api [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.307504] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096341, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.352705] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d30759a9-9f1a-4c06-bbd6-1c300aac6e4a tempest-InstanceActionsTestJSON-1460553136 tempest-InstanceActionsTestJSON-1460553136-project-member] Lock "01c8eb3b-bf30-4b00-af71-e32f0dc19171" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.118s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.359021] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096346, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.582797] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096345, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.594729] env[69927]: DEBUG oslo_vmware.api [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096347, 'name': PowerOffVM_Task, 'duration_secs': 0.262434} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.595072] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.595257] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.595526] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ac3526c-2c84-4050-900d-418381ead726 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.688375] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.688375] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.688375] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Deleting the datastore file [datastore2] 9348e368-cc3c-4bde-91ae-26fd03ad536a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.688375] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cccc3c4b-d746-462f-8706-4d7460891989 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.701119] env[69927]: DEBUG oslo_vmware.api [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for the task: (returnval){ [ 1036.701119] env[69927]: value = "task-4096349" [ 1036.701119] env[69927]: _type = "Task" [ 1036.701119] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.713983] env[69927]: DEBUG oslo_vmware.api [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096349, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.802686] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096341, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.823689] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d5f11d01-6150-4eb3-9ac3-e753af0bcba7 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.825031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.306s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.828603] env[69927]: INFO nova.compute.claims [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1036.854535] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.667366} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.854818] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.855674] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b52d479-6ab3-487f-88d8-58d83db742e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.888240] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2/ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.888240] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d5328a4-300d-4635-a3b6-a7c880d3e364 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.914702] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1036.914702] env[69927]: value = "task-4096350" [ 1036.914702] env[69927]: _type = "Task" [ 1036.914702] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.927038] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096350, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.082430] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096345, 'name': CreateVM_Task, 'duration_secs': 2.429657} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.082430] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.083108] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.083598] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.083987] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1037.084306] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7274fda0-5751-45ff-98ad-067b46c30172 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.091731] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1037.091731] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528c43bb-d8da-d890-f2d5-35b1744c4b40" [ 1037.091731] env[69927]: _type = "Task" [ 1037.091731] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.101699] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528c43bb-d8da-d890-f2d5-35b1744c4b40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.214855] env[69927]: DEBUG oslo_vmware.api [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Task: {'id': task-4096349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322938} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.215120] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.215321] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.215581] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.215807] env[69927]: INFO nova.compute.manager [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1037.216523] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.216982] env[69927]: DEBUG nova.compute.manager [-] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.217178] env[69927]: DEBUG nova.network.neutron [-] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.304175] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096341, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.780741} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.304175] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726/OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726.vmdk to [datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e/c87f5a71-3793-40fd-ad59-7b537237d25e.vmdk. 
[ 1037.304175] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Cleaning up location [datastore1] OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1037.304175] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_fc1e0a1e-6c45-4307-8e04-2aeae5f05726 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.304480] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1ec2106-d8d0-4c65-8f6f-376579a9454b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.313801] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1037.313801] env[69927]: value = "task-4096351" [ 1037.313801] env[69927]: _type = "Task" [ 1037.313801] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.323657] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.425857] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096350, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.603792] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528c43bb-d8da-d890-f2d5-35b1744c4b40, 'name': SearchDatastore_Task, 'duration_secs': 0.053389} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.604139] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.604389] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.604632] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.604809] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.605135] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.605548] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d32cbda1-9af3-41e1-a659-942cbc6f1425 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.621858] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.622063] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.622851] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27a511ba-39e4-428d-8709-90f0753b2d00 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.635884] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1037.635884] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dfbee4-5afb-ba9e-15de-df82617409b3" [ 1037.635884] env[69927]: _type = "Task" [ 1037.635884] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.646364] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dfbee4-5afb-ba9e-15de-df82617409b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.829060] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224805} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.829060] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.829060] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e/c87f5a71-3793-40fd-ad59-7b537237d25e.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.829060] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e/c87f5a71-3793-40fd-ad59-7b537237d25e.vmdk to [datastore1] b750ce2c-ee85-46c6-bf12-edb3f088e6de/b750ce2c-ee85-46c6-bf12-edb3f088e6de.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.829060] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d01ef901-a199-48d7-a53f-44f3d091c68b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.836337] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1037.836337] env[69927]: value = "task-4096352" [ 1037.836337] env[69927]: _type = "Task" [ 
1037.836337] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.848669] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.865912] env[69927]: DEBUG nova.compute.manager [req-c08323c2-28bd-49e5-98ed-6dc3d4761f03 req-4bdf8748-0f79-4830-9479-b1606aab3af7 service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Received event network-vif-deleted-67c7df0a-bb67-40ee-9a2d-11cea9dbacb7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1037.865912] env[69927]: INFO nova.compute.manager [req-c08323c2-28bd-49e5-98ed-6dc3d4761f03 req-4bdf8748-0f79-4830-9479-b1606aab3af7 service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Neutron deleted interface 67c7df0a-bb67-40ee-9a2d-11cea9dbacb7; detaching it from the instance and deleting it from the info cache [ 1037.865912] env[69927]: DEBUG nova.network.neutron [req-c08323c2-28bd-49e5-98ed-6dc3d4761f03 req-4bdf8748-0f79-4830-9479-b1606aab3af7 service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.940736] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096350, 'name': ReconfigVM_Task, 'duration_secs': 0.940309} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.940736] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Reconfigured VM instance instance-0000004d to attach disk [datastore1] ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2/ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.940736] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbab69aa-3ec7-4269-b812-dcaa298979f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.945469] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1037.945469] env[69927]: value = "task-4096353" [ 1037.945469] env[69927]: _type = "Task" [ 1037.945469] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.961019] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096353, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.158071] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dfbee4-5afb-ba9e-15de-df82617409b3, 'name': SearchDatastore_Task, 'duration_secs': 0.058746} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.162057] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6433ffe-b28b-41f6-bb05-fefd550913c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.171019] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1038.171019] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52eea904-2f1d-9fc5-ed39-5adfeafb351c" [ 1038.171019] env[69927]: _type = "Task" [ 1038.171019] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.187954] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52eea904-2f1d-9fc5-ed39-5adfeafb351c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.288964] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6129868-01dc-4d6c-96f8-5a530a3ea2b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.300993] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd48840-4402-4e13-8aba-7c12202c34fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.344322] env[69927]: DEBUG nova.network.neutron [-] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.349949] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111cd76c-7611-44ed-beb6-19a27b4674dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.360525] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096352, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.365018] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc014a78-90c6-4686-ae87-faa1ec505538 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.369934] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8e83314-96fc-4872-8062-db05320833dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.387784] env[69927]: DEBUG nova.compute.provider_tree [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.394016] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0a02c0-8184-4ae1-bf73-b3e9363a3bc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.439232] env[69927]: DEBUG nova.compute.manager [req-c08323c2-28bd-49e5-98ed-6dc3d4761f03 req-4bdf8748-0f79-4830-9479-b1606aab3af7 service nova] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Detach interface failed, port_id=67c7df0a-bb67-40ee-9a2d-11cea9dbacb7, reason: Instance 9348e368-cc3c-4bde-91ae-26fd03ad536a could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1038.460185] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096353, 'name': Rename_Task, 'duration_secs': 0.241668} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.460185] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.460185] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aaac3a5b-9c36-449e-8ce5-6f16e0d95e50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.469029] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1038.469029] env[69927]: value = "task-4096354" [ 1038.469029] env[69927]: _type = "Task" [ 1038.469029] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.490167] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.685687] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52eea904-2f1d-9fc5-ed39-5adfeafb351c, 'name': SearchDatastore_Task, 'duration_secs': 0.070744} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.686164] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.686546] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.686939] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e7fbe7e-27e4-4edf-9a0c-f4a9b24c87bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.699158] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1038.699158] env[69927]: value = "task-4096355" [ 1038.699158] env[69927]: _type = "Task" [ 1038.699158] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.720843] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.847061] env[69927]: INFO nova.compute.manager [-] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Took 1.63 seconds to deallocate network for instance. [ 1038.864099] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096352, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.892251] env[69927]: DEBUG nova.scheduler.client.report [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.983733] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096354, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.214551] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.363727] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096352, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.367267] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.404742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.404742] env[69927]: DEBUG nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1039.411273] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.601s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.411671] env[69927]: INFO nova.compute.claims [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.483529] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096354, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.715188] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.860458] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096352, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.912889] env[69927]: DEBUG nova.compute.utils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1039.915509] env[69927]: DEBUG nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1039.915763] env[69927]: DEBUG nova.network.neutron [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.980695] env[69927]: DEBUG nova.policy [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb82f5cf88084b4d891e106e6d033c76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8352266666b8479faa3232f53fb5d768', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1039.988668] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096354, 'name': PowerOnVM_Task} progress is 76%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.216674] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.362041] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096352, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.419710] env[69927]: DEBUG nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1040.427442] env[69927]: DEBUG nova.network.neutron [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Successfully created port: 62623986-fc5c-4de6-943f-7bdd5b400e04 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.491582] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096354, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.715404] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096355, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.861525] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096352, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.656442} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.865276] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c87f5a71-3793-40fd-ad59-7b537237d25e/c87f5a71-3793-40fd-ad59-7b537237d25e.vmdk to [datastore1] b750ce2c-ee85-46c6-bf12-edb3f088e6de/b750ce2c-ee85-46c6-bf12-edb3f088e6de.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1040.871321] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f8b14b-b623-4dfe-b520-2cf01532432e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.903387] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] b750ce2c-ee85-46c6-bf12-edb3f088e6de/b750ce2c-ee85-46c6-bf12-edb3f088e6de.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.909327] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-897819ad-7582-4fa4-84d6-0218b0fe7df0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.948121] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1040.948121] env[69927]: value = "task-4096356" [ 1040.948121] env[69927]: _type = "Task" [ 1040.948121] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.959962] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f9dc73-df6f-4e9c-b8b8-2ece4dad8d37 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.967722] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096356, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.974288] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2b84ad-9bcf-48f1-b230-17c4fb88d63c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.999972] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096354, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.042598] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc51f4ac-67ea-4ed9-8b26-d6d27b91bb0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.056412] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65895dd9-c7bd-4141-b0a2-7c2fa626abe5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.088707] env[69927]: DEBUG nova.compute.provider_tree [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.212542] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096355, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.24472} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.212854] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1041.213107] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1041.213404] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dccb2f1-30f7-4f10-ba42-c69c8fe9977c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.221850] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1041.221850] env[69927]: value = "task-4096357" [ 1041.221850] env[69927]: _type = "Task" [ 1041.221850] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.232038] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096357, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.440388] env[69927]: DEBUG nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1041.462743] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096356, 'name': ReconfigVM_Task, 'duration_secs': 0.342338} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.465350] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Reconfigured VM instance instance-0000004e to attach disk [datastore1] b750ce2c-ee85-46c6-bf12-edb3f088e6de/b750ce2c-ee85-46c6-bf12-edb3f088e6de.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.466684] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7153905-3032-49cb-9eca-5ac303a49db7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.476250] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1041.476531] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.476680] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1041.476863] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.477235] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1041.477472] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1041.477712] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1041.477905] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1041.478159] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1041.478347] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1041.478525] env[69927]: DEBUG nova.virt.hardware [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1041.480545] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c78eda-6637-4081-97e1-363f2649c485 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.485366] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1041.485366] env[69927]: value = "task-4096358" [ 1041.485366] env[69927]: _type = "Task" [ 1041.485366] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.499521] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587be12b-5e5a-44fc-b6c5-fc6b72e67faa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.516672] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096358, 'name': Rename_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.516672] env[69927]: DEBUG oslo_vmware.api [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096354, 'name': PowerOnVM_Task, 'duration_secs': 2.600027} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.516672] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.516672] env[69927]: INFO nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Took 12.07 seconds to spawn the instance on the hypervisor. [ 1041.516672] env[69927]: DEBUG nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1041.517294] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7ec2a3-8e66-4b9a-9866-14419fd82022 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.590565] env[69927]: DEBUG nova.scheduler.client.report [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.733707] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096357, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080318} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.733771] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1041.734652] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83fba36-25dd-43a2-881d-757043d461f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.767968] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.768206] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b42c4618-4827-473e-a342-beed76752806 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.789894] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1041.789894] env[69927]: value = "task-4096359" [ 1041.789894] env[69927]: _type = "Task" [ 1041.789894] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.800103] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096359, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.003277] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096358, 'name': Rename_Task, 'duration_secs': 0.165344} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.004018] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.004638] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8c60329-6d1f-405d-bba9-fdff77d4ff52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.015261] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1042.015261] env[69927]: value = "task-4096360" [ 1042.015261] env[69927]: _type = "Task" [ 1042.015261] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.026028] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096360, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.048558] env[69927]: INFO nova.compute.manager [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Took 49.03 seconds to build instance. [ 1042.098906] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.098906] env[69927]: DEBUG nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1042.100439] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.110s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.103520] env[69927]: INFO nova.compute.claims [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.141304] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "dd4c3963-aa58-49f2-b675-9863ff13bddf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.141863] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.274168] env[69927]: DEBUG nova.compute.manager [req-a7c15327-7914-4c73-beb8-4317ff766f12 req-ea89622f-bb4e-4b19-afee-73d01bb6b019 service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Received event network-vif-plugged-62623986-fc5c-4de6-943f-7bdd5b400e04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.274168] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7c15327-7914-4c73-beb8-4317ff766f12 req-ea89622f-bb4e-4b19-afee-73d01bb6b019 service nova] Acquiring lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.274168] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7c15327-7914-4c73-beb8-4317ff766f12 req-ea89622f-bb4e-4b19-afee-73d01bb6b019 service nova] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.274168] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7c15327-7914-4c73-beb8-4317ff766f12 req-ea89622f-bb4e-4b19-afee-73d01bb6b019 service nova] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.274168] env[69927]: DEBUG nova.compute.manager [req-a7c15327-7914-4c73-beb8-4317ff766f12 req-ea89622f-bb4e-4b19-afee-73d01bb6b019 service 
nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] No waiting events found dispatching network-vif-plugged-62623986-fc5c-4de6-943f-7bdd5b400e04 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1042.274168] env[69927]: WARNING nova.compute.manager [req-a7c15327-7914-4c73-beb8-4317ff766f12 req-ea89622f-bb4e-4b19-afee-73d01bb6b019 service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Received unexpected event network-vif-plugged-62623986-fc5c-4de6-943f-7bdd5b400e04 for instance with vm_state building and task_state spawning. [ 1042.310267] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096359, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.501229] env[69927]: DEBUG nova.network.neutron [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Successfully updated port: 62623986-fc5c-4de6-943f-7bdd5b400e04 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.528776] env[69927]: DEBUG oslo_vmware.api [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096360, 'name': PowerOnVM_Task, 'duration_secs': 0.490181} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.529419] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.529766] env[69927]: INFO nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Took 15.78 seconds to spawn the instance on the hypervisor. 
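The records above all follow the same vCenter task lifecycle: a SOAP method such as CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task or PowerOnVM_Task is invoked, a Task handle comes back, and the caller waits on it, logging "progress is N%" (api.py:434) until a "completed successfully" entry with duration_secs appears (api.py:444). The sketch below is not the oslo.vmware or nova code; it only models that observable poll-until-done loop with hypothetical invoke_task/poll_task stand-ins.

import time

def invoke_task(name: str) -> dict:
    # Hypothetical stand-in: pretend to start a vCenter task and return its handle.
    return {"id": f"task-{name}", "name": name, "progress": 0}

def poll_task(task: dict) -> dict:
    # Hypothetical stand-in: fetch the task's current state; progress just advances here.
    task["progress"] = min(100, task["progress"] + 25)
    return task

def wait_for_task(task: dict, interval: float = 0.1) -> dict:
    # Poll until the task reports completion, mirroring the progress/completed
    # entries that api.py:434 and api.py:444 emit in the log above.
    while True:
        state = poll_task(task)
        if state["progress"] >= 100:
            print(f"Task {state['id']} ({state['name']}) completed successfully.")
            return state
        print(f"Task {state['id']} ({state['name']}) progress is {state['progress']}%.")
        time.sleep(interval)

# The spawn sequence the surrounding records show for a single instance:
# copy the cached image disk, extend the root disk, attach it, rename, power on.
for step in ("CopyVirtualDisk_Task", "ExtendVirtualDisk_Task",
             "ReconfigVM_Task", "Rename_Task", "PowerOnVM_Task"):
    wait_for_task(invoke_task(step))

In the log this loop drives each build in parallel, which is why polls for task-4096352, task-4096354 and task-4096355 interleave across the same timestamps.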
[ 1042.530145] env[69927]: DEBUG nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1042.531767] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76eecb27-2121-41fe-91c3-2e827ac3465d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.554839] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9782057-fdc4-422d-9532-0d63dac69b63 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.871s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.602051] env[69927]: DEBUG nova.compute.utils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1042.604218] env[69927]: DEBUG nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1042.604218] env[69927]: DEBUG nova.network.neutron [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1042.647354] env[69927]: DEBUG nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1042.701554] env[69927]: DEBUG nova.policy [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1042.802342] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096359, 'name': ReconfigVM_Task, 'duration_secs': 0.796481} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.803229] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Reconfigured VM instance instance-0000004f to attach disk [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.803386] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-794ee2ce-8ff0-47df-8b2c-6af92fd5c574 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.814734] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1042.814734] env[69927]: value = "task-4096361" [ 1042.814734] env[69927]: _type = "Task" [ 1042.814734] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.827598] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096361, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.004318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "refresh_cache-3936a3db-4afa-4a37-9d63-8c18b6b72c72" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.004586] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquired lock "refresh_cache-3936a3db-4afa-4a37-9d63-8c18b6b72c72" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.004682] env[69927]: DEBUG nova.network.neutron [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.056245] env[69927]: INFO nova.compute.manager [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Took 55.91 seconds to build instance. [ 1043.110395] env[69927]: DEBUG nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1043.167704] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.335042] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096361, 'name': Rename_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.400026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.477232] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.477531] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.477752] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.477938] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.478119] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.482617] env[69927]: INFO nova.compute.manager [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Terminating instance [ 1043.545826] env[69927]: DEBUG nova.network.neutron [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.556854] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7cff6c9d-93b3-42d8-93eb-f29f203a7583 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.878s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.557142] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.160s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.557381] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.557583] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.557747] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.559898] env[69927]: INFO nova.compute.manager [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Terminating instance [ 1043.589239] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5700aff-e349-4050-88e1-d28af6961aba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.601048] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37354471-4698-43c3-855f-b12f2ab0f88b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.648076] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e25908d-0329-4673-8b32-4ef272e452c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.654635] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-79093793-f566-42d2-95f5-259fe75f6940 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.675387] env[69927]: DEBUG nova.compute.provider_tree [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.709476] env[69927]: DEBUG nova.network.neutron [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Successfully created port: 31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.797275] env[69927]: DEBUG nova.network.neutron [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Updating instance_info_cache with network_info: [{"id": "62623986-fc5c-4de6-943f-7bdd5b400e04", "address": "fa:16:3e:ee:57:e2", "network": {"id": "aa361ccd-9ac0-4d67-b1af-fb583b30dc1a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1476190609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8352266666b8479faa3232f53fb5d768", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62623986-fc", "ovs_interfaceid": "62623986-fc5c-4de6-943f-7bdd5b400e04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.834927] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096361, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.996421] env[69927]: DEBUG nova.compute.manager [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1043.996421] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.997966] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbe13c2-ee1b-4351-9124-33a926b5eb91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.005505] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.005766] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1db3a877-d8a2-4d2c-a0ab-dfdd63fcf554 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.015834] env[69927]: DEBUG oslo_vmware.api [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 1044.015834] env[69927]: value = "task-4096362" [ 1044.015834] env[69927]: _type = "Task" [ 1044.015834] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.028853] env[69927]: DEBUG oslo_vmware.api [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096362, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.065195] env[69927]: DEBUG nova.compute.manager [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1044.065852] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.066363] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c603d434-0597-4096-bf5e-608a4ee0e383 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.075592] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.075740] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd0b4d56-e429-4040-a482-0db93f5d05c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.083608] env[69927]: DEBUG oslo_vmware.api [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1044.083608] env[69927]: value = "task-4096363" [ 1044.083608] env[69927]: _type = "Task" [ 1044.083608] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.095454] env[69927]: DEBUG oslo_vmware.api [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096363, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.150817] env[69927]: DEBUG nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1044.174744] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1044.175061] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.175251] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1044.175459] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.175630] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1044.175785] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1044.176014] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1044.176210] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1044.176392] env[69927]: DEBUG 
nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1044.176572] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1044.176755] env[69927]: DEBUG nova.virt.hardware [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1044.177792] env[69927]: DEBUG nova.scheduler.client.report [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1044.182539] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96698073-7634-4cd2-9d6f-4dbee3131e12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.192838] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae41cff-4939-44bf-bcd4-1e8e7105d625 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.303286] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Releasing lock "refresh_cache-3936a3db-4afa-4a37-9d63-8c18b6b72c72" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.303286] env[69927]: DEBUG nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Instance network_info: |[{"id": "62623986-fc5c-4de6-943f-7bdd5b400e04", "address": "fa:16:3e:ee:57:e2", "network": {"id": "aa361ccd-9ac0-4d67-b1af-fb583b30dc1a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1476190609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8352266666b8479faa3232f53fb5d768", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62623986-fc", "ovs_interfaceid": "62623986-fc5c-4de6-943f-7bdd5b400e04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1044.303286] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:57:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '360308f4-9d0a-4ec2-8bcf-44891f452847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62623986-fc5c-4de6-943f-7bdd5b400e04', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.313308] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Creating folder: Project (8352266666b8479faa3232f53fb5d768). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1044.313644] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62666064-b0ac-454c-a724-d2b041249ce7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.326134] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Created folder: Project (8352266666b8479faa3232f53fb5d768) in parent group-v811283. [ 1044.326553] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Creating folder: Instances. Parent ref: group-v811514. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1044.330320] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6b9fea6-caa1-45b0-8da5-c63e819a94d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.337582] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096361, 'name': Rename_Task, 'duration_secs': 1.459257} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.337860] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.338122] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a010a579-18e1-4bef-86d6-ea5446742540 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.341483] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Created folder: Instances in parent group-v811514. [ 1044.341718] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1044.342269] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.342479] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac3c9155-e7e6-4f1e-88cd-a09c0bb2b93c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.360418] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1044.360418] env[69927]: value = "task-4096366" [ 1044.360418] env[69927]: _type = "Task" [ 1044.360418] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.366480] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.366480] env[69927]: value = "task-4096367" [ 1044.366480] env[69927]: _type = "Task" [ 1044.366480] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.373777] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096366, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.379198] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096367, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.434523] env[69927]: DEBUG nova.compute.manager [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Received event network-changed-62623986-fc5c-4de6-943f-7bdd5b400e04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1044.434683] env[69927]: DEBUG nova.compute.manager [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Refreshing instance network info cache due to event network-changed-62623986-fc5c-4de6-943f-7bdd5b400e04. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1044.434927] env[69927]: DEBUG oslo_concurrency.lockutils [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] Acquiring lock "refresh_cache-3936a3db-4afa-4a37-9d63-8c18b6b72c72" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.435381] env[69927]: DEBUG oslo_concurrency.lockutils [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] Acquired lock "refresh_cache-3936a3db-4afa-4a37-9d63-8c18b6b72c72" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.435381] env[69927]: DEBUG nova.network.neutron [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Refreshing network info cache for port 62623986-fc5c-4de6-943f-7bdd5b400e04 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1044.526981] env[69927]: DEBUG oslo_vmware.api [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096362, 'name': PowerOffVM_Task, 'duration_secs': 0.288243} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.527536] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.527536] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.527814] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d3b92fe-44ff-439f-8513-796b8f29457a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.594970] env[69927]: DEBUG oslo_vmware.api [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096363, 'name': PowerOffVM_Task, 'duration_secs': 0.249322} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.595925] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.596790] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.596790] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be8fe044-841e-4d7a-ac99-7bc6c9b7965a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.610293] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.610388] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.610538] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Deleting the datastore file [datastore2] 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.610819] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffc1aa61-b0d8-43c7-b3e3-f4637eb5ba5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.640381] env[69927]: DEBUG oslo_vmware.api [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for the task: (returnval){ [ 1044.640381] env[69927]: value = "task-4096370" [ 1044.640381] env[69927]: _type = "Task" [ 1044.640381] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.693304] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.693304] env[69927]: DEBUG nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1044.694536] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.479s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.694775] env[69927]: DEBUG nova.objects.instance [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lazy-loading 'resources' on Instance uuid c87680be-227e-4a3e-92d3-c2310623bfe4 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.724594] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.725121] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.725385] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleting the datastore file [datastore1] b750ce2c-ee85-46c6-bf12-edb3f088e6de {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.725701] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a1c5217-8336-4db5-bde2-f694f31eaf1c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.735580] env[69927]: DEBUG oslo_vmware.api [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1044.735580] env[69927]: value = "task-4096371" [ 1044.735580] env[69927]: _type = "Task" [ 1044.735580] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.746625] env[69927]: DEBUG oslo_vmware.api [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096371, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.875560] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096366, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.884652] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096367, 'name': CreateVM_Task, 'duration_secs': 0.452962} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.888032] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.888032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.888032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.888032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1044.888032] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49fbde6d-73b9-480d-af81-77a63bd443d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.894241] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1044.894241] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5210cb87-d400-5acc-41b8-24a8c797b17c" [ 1044.894241] env[69927]: _type = "Task" [ 1044.894241] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.907876] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5210cb87-d400-5acc-41b8-24a8c797b17c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.149046] env[69927]: DEBUG oslo_vmware.api [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Task: {'id': task-4096370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202342} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.149801] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.150827] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.150827] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.150983] env[69927]: INFO nova.compute.manager [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1045.151362] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.151663] env[69927]: DEBUG nova.compute.manager [-] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1045.151819] env[69927]: DEBUG nova.network.neutron [-] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.203234] env[69927]: DEBUG nova.compute.utils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1045.205938] env[69927]: DEBUG nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1045.206276] env[69927]: DEBUG nova.network.neutron [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.211426] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.211652] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.247852] env[69927]: DEBUG oslo_vmware.api [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096371, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184464} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.251245] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.251446] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.251619] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.251888] env[69927]: INFO nova.compute.manager [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1045.252128] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.253273] env[69927]: DEBUG nova.compute.manager [-] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1045.253381] env[69927]: DEBUG nova.network.neutron [-] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.282150] env[69927]: DEBUG nova.network.neutron [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Updated VIF entry in instance network info cache for port 62623986-fc5c-4de6-943f-7bdd5b400e04. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1045.282513] env[69927]: DEBUG nova.network.neutron [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Updating instance_info_cache with network_info: [{"id": "62623986-fc5c-4de6-943f-7bdd5b400e04", "address": "fa:16:3e:ee:57:e2", "network": {"id": "aa361ccd-9ac0-4d67-b1af-fb583b30dc1a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1476190609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8352266666b8479faa3232f53fb5d768", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62623986-fc", "ovs_interfaceid": "62623986-fc5c-4de6-943f-7bdd5b400e04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.289894] env[69927]: DEBUG nova.policy [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd98497c8260f4692b8d5410447575350', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babf071cfc564f7d83c28d449c774840', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1045.374034] env[69927]: DEBUG oslo_vmware.api [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096366, 'name': PowerOnVM_Task, 'duration_secs': 0.633165} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.374034] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.374034] env[69927]: INFO nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Took 13.31 seconds to spawn the instance on the hypervisor. 
[ 1045.374034] env[69927]: DEBUG nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1045.375132] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88159d6e-2881-42bc-b242-4ec341ad717c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.407339] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5210cb87-d400-5acc-41b8-24a8c797b17c, 'name': SearchDatastore_Task, 'duration_secs': 0.01144} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.410435] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.411151] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.412447] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.412619] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.412776] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1045.413937] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fef2e44-5e45-470d-9c65-3fc8d0ed6497 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.430340] env[69927]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1045.430782] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1045.432135] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9573be6c-d0e6-4570-adda-fd689a807f18 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.440665] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1045.440665] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528377cc-6bc4-5b9c-0bf9-4918179b9b75" [ 1045.440665] env[69927]: _type = "Task" [ 1045.440665] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.455569] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528377cc-6bc4-5b9c-0bf9-4918179b9b75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.464106] env[69927]: DEBUG nova.network.neutron [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Successfully updated port: 31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.622984] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453d5f6a-dbb6-4504-9f3c-2319b03026d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.631017] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcb854a-4a25-484d-9dd1-8a109df25b89 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.666259] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2944eb78-c52a-4be7-beb8-5863818f11f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.679422] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f201c94-dc33-44bd-a849-d45bb16d2919 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.689519] env[69927]: DEBUG nova.compute.provider_tree [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.715073] env[69927]: DEBUG nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1045.719459] env[69927]: DEBUG nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1045.731228] env[69927]: DEBUG nova.network.neutron [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Successfully created port: ace9865c-a7d5-41ad-abee-d70d3aa00dc0 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.788343] env[69927]: DEBUG oslo_concurrency.lockutils [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] Releasing lock "refresh_cache-3936a3db-4afa-4a37-9d63-8c18b6b72c72" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.788645] env[69927]: DEBUG nova.compute.manager [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Received event network-changed-8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.788818] env[69927]: DEBUG nova.compute.manager [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Refreshing instance network info cache due to event network-changed-8c5792b5-80a0-4414-bb3b-ae6e25874202. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1045.789045] env[69927]: DEBUG oslo_concurrency.lockutils [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] Acquiring lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.789195] env[69927]: DEBUG oslo_concurrency.lockutils [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] Acquired lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.789453] env[69927]: DEBUG nova.network.neutron [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Refreshing network info cache for port 8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.903938] env[69927]: INFO nova.compute.manager [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Took 50.17 seconds to build instance. [ 1045.951953] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528377cc-6bc4-5b9c-0bf9-4918179b9b75, 'name': SearchDatastore_Task, 'duration_secs': 0.030452} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.952769] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c5350b9-ddf7-4f33-8de8-474a2fef9bcd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.960104] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1045.960104] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b88051-5359-7b88-7e1a-28ae974f90f5" [ 1045.960104] env[69927]: _type = "Task" [ 1045.960104] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.970452] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.970452] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.970452] env[69927]: DEBUG nova.network.neutron [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.971304] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b88051-5359-7b88-7e1a-28ae974f90f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.194125] env[69927]: DEBUG nova.scheduler.client.report [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1046.249021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.261407] env[69927]: DEBUG nova.network.neutron [-] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.389271] env[69927]: DEBUG nova.network.neutron [-] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.408027] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9315a829-a9be-4bcf-854f-d29c8d7b6b18 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.889s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.478649] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b88051-5359-7b88-7e1a-28ae974f90f5, 'name': SearchDatastore_Task, 'duration_secs': 0.030916} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.479973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.479973] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 3936a3db-4afa-4a37-9d63-8c18b6b72c72/3936a3db-4afa-4a37-9d63-8c18b6b72c72.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.482024] env[69927]: DEBUG nova.compute.manager [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-vif-plugged-31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.482024] env[69927]: DEBUG oslo_concurrency.lockutils [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.482024] env[69927]: DEBUG oslo_concurrency.lockutils [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.482024] env[69927]: DEBUG oslo_concurrency.lockutils [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.482024] env[69927]: DEBUG nova.compute.manager [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] No waiting events found dispatching network-vif-plugged-31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1046.482698] env[69927]: WARNING nova.compute.manager [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received unexpected event network-vif-plugged-31239db7-86bd-4d24-b54f-414bd1d5a3d1 for instance with vm_state building and task_state spawning. 
[ 1046.482698] env[69927]: DEBUG nova.compute.manager [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-changed-31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.482698] env[69927]: DEBUG nova.compute.manager [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing instance network info cache due to event network-changed-31239db7-86bd-4d24-b54f-414bd1d5a3d1. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1046.482698] env[69927]: DEBUG oslo_concurrency.lockutils [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.483334] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cf89c3e-494a-49bb-93ed-77825da12d77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.491439] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1046.491439] env[69927]: value = "task-4096372" [ 1046.491439] env[69927]: _type = "Task" [ 1046.491439] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.501949] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.538104] env[69927]: DEBUG nova.network.neutron [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1046.653645] env[69927]: DEBUG nova.network.neutron [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Updated VIF entry in instance network info cache for port 8c5792b5-80a0-4414-bb3b-ae6e25874202. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.653990] env[69927]: DEBUG nova.network.neutron [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Updating instance_info_cache with network_info: [{"id": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "address": "fa:16:3e:26:f8:b0", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5792b5-80", "ovs_interfaceid": "8c5792b5-80a0-4414-bb3b-ae6e25874202", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.700726] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.705021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.105s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.705237] env[69927]: DEBUG nova.objects.instance [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1046.723066] env[69927]: DEBUG nova.network.neutron [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.729706] env[69927]: DEBUG nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1046.735166] env[69927]: INFO nova.scheduler.client.report [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Deleted allocations for instance c87680be-227e-4a3e-92d3-c2310623bfe4 [ 1046.765450] env[69927]: INFO nova.compute.manager [-] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Took 1.51 seconds to deallocate network for instance. 
[ 1046.767945] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1046.768208] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.768395] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1046.768617] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.768843] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1046.769011] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1046.769266] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1046.769432] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1046.769656] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1046.769845] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1046.770031] env[69927]: DEBUG nova.virt.hardware [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1046.772493] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdf0617-3c8e-4ec5-bf27-48c0a88a6c02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.785044] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aea0875-7444-42c1-9867-1e2a01c94d45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.893321] env[69927]: INFO nova.compute.manager [-] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Took 1.74 seconds to deallocate network for instance. [ 1047.004172] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096372, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.159432] env[69927]: DEBUG oslo_concurrency.lockutils [req-f6f9129c-d483-431b-a32a-b42b94fc1801 req-fe542fb2-dbdd-4dde-8223-edd11226a712 service nova] Releasing lock "refresh_cache-ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.226512] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.226512] env[69927]: DEBUG nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Instance network_info: |[{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1047.226512] env[69927]: DEBUG oslo_concurrency.lockutils [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.226804] env[69927]: DEBUG nova.network.neutron [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing network info cache for port 31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.228044] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:be:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'31239db7-86bd-4d24-b54f-414bd1d5a3d1', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.236490] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.237413] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.237698] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0d61a87-bffe-436b-b936-0469e04aaa74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.258620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-600fdf42-cd53-4bbe-a2d4-05ff0505b496 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c87680be-227e-4a3e-92d3-c2310623bfe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.197s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.267069] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.267069] env[69927]: value = "task-4096373" [ 1047.267069] env[69927]: _type = "Task" [ 1047.267069] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.277118] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096373, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.280670] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.403495] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.503804] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096372, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.000216} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.504134] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 3936a3db-4afa-4a37-9d63-8c18b6b72c72/3936a3db-4afa-4a37-9d63-8c18b6b72c72.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.504393] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.504701] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a81fc46-20a3-452b-ae37-b4424fa5fcd1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.513256] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1047.513256] env[69927]: value = "task-4096374" [ 1047.513256] env[69927]: _type = "Task" [ 1047.513256] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.522908] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096374, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.717494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f2116341-341f-44bc-aa7a-4affa13a824e tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.723792] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.472s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.723792] env[69927]: DEBUG nova.objects.instance [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lazy-loading 'resources' on Instance uuid 67e00c40-35b6-4a9f-9505-19b804e78c04 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.725810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.731052] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.775777] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096373, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.958960] env[69927]: DEBUG nova.network.neutron [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updated VIF entry in instance network info cache for port 31239db7-86bd-4d24-b54f-414bd1d5a3d1. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1047.959406] env[69927]: DEBUG nova.network.neutron [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.025799] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081001} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.025799] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1048.026073] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7906deac-5335-4b50-a1ba-275db3c53645 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.051169] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 3936a3db-4afa-4a37-9d63-8c18b6b72c72/3936a3db-4afa-4a37-9d63-8c18b6b72c72.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.052152] env[69927]: DEBUG nova.network.neutron [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Successfully updated port: ace9865c-a7d5-41ad-abee-d70d3aa00dc0 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1048.057047] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0006e4b4-77ab-4be5-a9b9-91266b7325b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.076877] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1048.076877] env[69927]: value = "task-4096375" [ 1048.076877] env[69927]: _type = "Task" [ 1048.076877] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.085816] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096375, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.152386] env[69927]: DEBUG nova.compute.manager [req-dea0fa2f-d14c-4373-b596-ffec83964ee2 req-a71fec52-5f7c-42c6-aeb8-b509877662e0 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Received event network-vif-plugged-ace9865c-a7d5-41ad-abee-d70d3aa00dc0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.152693] env[69927]: DEBUG oslo_concurrency.lockutils [req-dea0fa2f-d14c-4373-b596-ffec83964ee2 req-a71fec52-5f7c-42c6-aeb8-b509877662e0 service nova] Acquiring lock "0f5643d4-52f3-4cba-b71b-9c4370175e35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.153014] env[69927]: DEBUG oslo_concurrency.lockutils [req-dea0fa2f-d14c-4373-b596-ffec83964ee2 req-a71fec52-5f7c-42c6-aeb8-b509877662e0 service nova] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.153275] env[69927]: DEBUG oslo_concurrency.lockutils [req-dea0fa2f-d14c-4373-b596-ffec83964ee2 req-a71fec52-5f7c-42c6-aeb8-b509877662e0 service nova] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.153558] env[69927]: DEBUG nova.compute.manager [req-dea0fa2f-d14c-4373-b596-ffec83964ee2 req-a71fec52-5f7c-42c6-aeb8-b509877662e0 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] No waiting events found dispatching network-vif-plugged-ace9865c-a7d5-41ad-abee-d70d3aa00dc0 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1048.153795] env[69927]: WARNING nova.compute.manager [req-dea0fa2f-d14c-4373-b596-ffec83964ee2 req-a71fec52-5f7c-42c6-aeb8-b509877662e0 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Received unexpected event network-vif-plugged-ace9865c-a7d5-41ad-abee-d70d3aa00dc0 for instance with vm_state building and task_state spawning. [ 1048.229847] env[69927]: DEBUG nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1048.282907] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096373, 'name': CreateVM_Task, 'duration_secs': 0.525562} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.283411] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1048.284515] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.284951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.285341] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1048.285640] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bc969fe-e6ea-43a3-ba76-a99bf224eff5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.296064] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1048.296064] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52308947-ee8c-c914-c037-5e3e0320c05d" [ 1048.296064] env[69927]: _type = "Task" [ 1048.296064] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.317934] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52308947-ee8c-c914-c037-5e3e0320c05d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.464020] env[69927]: DEBUG oslo_concurrency.lockutils [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.464381] env[69927]: DEBUG nova.compute.manager [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Received event network-vif-deleted-1d1a9d58-33a0-4ef9-b472-1e66ed5b0450 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.464650] env[69927]: DEBUG nova.compute.manager [req-c3d9a2a7-f7db-4232-bb6e-8877eadc1712 req-23bf612c-ec98-4828-a161-e8acc9840de2 service nova] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Received event network-vif-deleted-3edeb122-1a25-4dcd-93fc-1dcf798a6da1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.554838] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "refresh_cache-0f5643d4-52f3-4cba-b71b-9c4370175e35" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.555293] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "refresh_cache-0f5643d4-52f3-4cba-b71b-9c4370175e35" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.555564] env[69927]: DEBUG nova.network.neutron [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1048.589738] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096375, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.646502] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb2ec05-b650-47da-814e-a6c5a287921f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.654844] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ca81e1-5154-4406-9ec9-b10cc2a0a5a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.687225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fe0034-ef87-4257-941f-a0b4992c9ce5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.696283] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8922e9e-21be-4221-b671-3979f190f7e4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.714194] env[69927]: DEBUG nova.compute.provider_tree [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.749653] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.811439] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52308947-ee8c-c914-c037-5e3e0320c05d, 'name': SearchDatastore_Task, 'duration_secs': 0.034786} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.811439] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.811647] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.811864] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.811978] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.812166] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.812430] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-624c96c3-66a9-4115-9692-4839b7158e04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.817993] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.818260] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.818464] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring 
lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.818642] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.819876] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.820713] env[69927]: INFO nova.compute.manager [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Terminating instance [ 1048.831455] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.831656] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1048.832421] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47d1091f-fa89-45af-8dea-f316b446f94c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.839399] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1048.839399] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528872f1-a4d9-c1fc-f828-6c0488d4874c" [ 1048.839399] env[69927]: _type = "Task" [ 1048.839399] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.850634] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528872f1-a4d9-c1fc-f828-6c0488d4874c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.937271] env[69927]: DEBUG nova.compute.manager [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1048.938191] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d313e8-403d-4dd7-8710-707ddea017de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.966146] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.966465] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.966682] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.966857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.967138] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.969848] env[69927]: INFO nova.compute.manager [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Terminating instance [ 1049.087215] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 
tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096375, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.108513] env[69927]: DEBUG nova.network.neutron [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1049.217947] env[69927]: DEBUG nova.scheduler.client.report [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.329022] env[69927]: DEBUG nova.compute.manager [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.329022] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.329022] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba72378c-426c-4f2a-a1f3-0a8c78896ba4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.337335] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.337732] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-201ff569-b471-445d-bcf9-464dc6fecaba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.354018] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528872f1-a4d9-c1fc-f828-6c0488d4874c, 'name': SearchDatastore_Task, 'duration_secs': 0.032092} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.355724] env[69927]: DEBUG oslo_vmware.api [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1049.355724] env[69927]: value = "task-4096376" [ 1049.355724] env[69927]: _type = "Task" [ 1049.355724] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.356235] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ec908fe-db7b-4d94-aeba-86ed3bab5c87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.369241] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1049.369241] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234509d-cfbc-0651-3037-9643fee90d0b" [ 1049.369241] env[69927]: _type = "Task" [ 1049.369241] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.375024] env[69927]: DEBUG oslo_vmware.api [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096376, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.376543] env[69927]: DEBUG nova.network.neutron [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Updating instance_info_cache with network_info: [{"id": "ace9865c-a7d5-41ad-abee-d70d3aa00dc0", "address": "fa:16:3e:99:0a:35", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapace9865c-a7", "ovs_interfaceid": "ace9865c-a7d5-41ad-abee-d70d3aa00dc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.389052] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] 
Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234509d-cfbc-0651-3037-9643fee90d0b, 'name': SearchDatastore_Task, 'duration_secs': 0.011756} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.390298] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.390748] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 20ac32b7-51fc-40bf-a667-2aeb6c8c7648/20ac32b7-51fc-40bf-a667-2aeb6c8c7648.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1049.391489] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00d21081-7fcd-4ae6-b366-054a7c9176d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.400838] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1049.400838] env[69927]: value = "task-4096377" [ 1049.400838] env[69927]: _type = "Task" [ 1049.400838] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.409944] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.452114] env[69927]: INFO nova.compute.manager [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] instance snapshotting [ 1049.452858] env[69927]: DEBUG nova.objects.instance [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'flavor' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.476976] env[69927]: DEBUG nova.compute.manager [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.477608] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.478656] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3381cfa9-2a24-48fb-bbcf-143fa1a67f6c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.488558] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.488884] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d5c468d-fe27-49ad-ae86-d9cc19a2511d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.497354] env[69927]: DEBUG oslo_vmware.api [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 1049.497354] env[69927]: value = "task-4096378" [ 1049.497354] env[69927]: _type = "Task" [ 1049.497354] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.507935] env[69927]: DEBUG oslo_vmware.api [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096378, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.590814] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096375, 'name': ReconfigVM_Task, 'duration_secs': 1.16641} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.591110] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 3936a3db-4afa-4a37-9d63-8c18b6b72c72/3936a3db-4afa-4a37-9d63-8c18b6b72c72.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.591878] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9d3bdcb-e30e-4c79-93ac-6c537080c161 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.599926] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1049.599926] env[69927]: value = "task-4096379" [ 1049.599926] env[69927]: _type = "Task" [ 1049.599926] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.611852] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096379, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.726191] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.728866] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.597s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.729141] env[69927]: DEBUG nova.objects.instance [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lazy-loading 'resources' on Instance uuid 93d19a66-f00e-4fa8-9eed-32035b020ba2 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.762733] env[69927]: INFO nova.scheduler.client.report [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted allocations for instance 67e00c40-35b6-4a9f-9505-19b804e78c04 [ 1049.872588] env[69927]: DEBUG oslo_vmware.api [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096376, 'name': PowerOffVM_Task, 'duration_secs': 0.234559} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.872799] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.873055] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.873224] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7072c00a-7973-4763-9fac-0b33822c7501 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.883084] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "refresh_cache-0f5643d4-52f3-4cba-b71b-9c4370175e35" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.883512] env[69927]: DEBUG nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Instance network_info: |[{"id": "ace9865c-a7d5-41ad-abee-d70d3aa00dc0", "address": "fa:16:3e:99:0a:35", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapace9865c-a7", "ovs_interfaceid": "ace9865c-a7d5-41ad-abee-d70d3aa00dc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1049.884323] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:0a:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'ace9865c-a7d5-41ad-abee-d70d3aa00dc0', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.892395] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.892653] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1049.892874] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdc61964-38fe-46d4-ab92-1ea7145f7fe5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.917390] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096377, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.918971] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.918971] env[69927]: value = "task-4096381" [ 1049.918971] env[69927]: _type = "Task" [ 1049.918971] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.927977] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096381, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.959889] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56047057-b10d-47a1-98e5-d207aea29bf1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.965175] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.965409] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.965595] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleting the datastore file [datastore2] 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.965846] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a300e12-04f4-426d-9cc0-2d27ab52874a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.984759] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc53e145-9f7e-41e5-933c-2ae66a68e5d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.989240] env[69927]: DEBUG oslo_vmware.api [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1049.989240] env[69927]: value = "task-4096382" [ 1049.989240] env[69927]: _type = "Task" [ 1049.989240] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.005316] env[69927]: DEBUG oslo_vmware.api [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096382, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.012036] env[69927]: DEBUG oslo_vmware.api [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096378, 'name': PowerOffVM_Task, 'duration_secs': 0.203678} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.012036] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.012036] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.012313] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12651736-ace4-42bc-9e25-2531bc1bc01a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.107665] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1050.107916] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1050.108163] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Deleting the datastore file [datastore1] c3a531fd-647c-43b6-9d3d-fc6ecbc2445e {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.108857] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e3d42d7-3b41-4667-b380-c828aa839d88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.114401] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096379, 'name': Rename_Task, 'duration_secs': 0.466848} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.115146] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1050.115444] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57457903-820e-4930-897f-c3ff119b4844 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.120050] env[69927]: DEBUG oslo_vmware.api [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for the task: (returnval){ [ 1050.120050] env[69927]: value = "task-4096384" [ 1050.120050] env[69927]: _type = "Task" [ 1050.120050] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.125244] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1050.125244] env[69927]: value = "task-4096385" [ 1050.125244] env[69927]: _type = "Task" [ 1050.125244] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.132163] env[69927]: DEBUG oslo_vmware.api [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.139508] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096385, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.190628] env[69927]: DEBUG nova.compute.manager [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Received event network-changed-ace9865c-a7d5-41ad-abee-d70d3aa00dc0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.190628] env[69927]: DEBUG nova.compute.manager [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Refreshing instance network info cache due to event network-changed-ace9865c-a7d5-41ad-abee-d70d3aa00dc0. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1050.190628] env[69927]: DEBUG oslo_concurrency.lockutils [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] Acquiring lock "refresh_cache-0f5643d4-52f3-4cba-b71b-9c4370175e35" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.190964] env[69927]: DEBUG oslo_concurrency.lockutils [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] Acquired lock "refresh_cache-0f5643d4-52f3-4cba-b71b-9c4370175e35" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.190964] env[69927]: DEBUG nova.network.neutron [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Refreshing network info cache for port ace9865c-a7d5-41ad-abee-d70d3aa00dc0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.270916] env[69927]: DEBUG oslo_concurrency.lockutils [None req-993541db-c890-40c8-85ea-9b2a8228979d tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "67e00c40-35b6-4a9f-9505-19b804e78c04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.654s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.419613] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581367} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.423183] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 20ac32b7-51fc-40bf-a667-2aeb6c8c7648/20ac32b7-51fc-40bf-a667-2aeb6c8c7648.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1050.423530] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1050.423776] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb81d775-d591-474e-a211-a044dc538f59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.436226] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096381, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.436226] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1050.436226] env[69927]: value = "task-4096386" [ 1050.436226] env[69927]: _type = "Task" [ 1050.436226] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.445811] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.499735] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1050.500156] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4d400408-6de6-4fbd-bb72-08c48e3e2a48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.506542] env[69927]: DEBUG oslo_vmware.api [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193862} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.507449] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.507740] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.508106] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.508309] env[69927]: INFO nova.compute.manager [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Took 1.18 seconds to destroy the instance on the hypervisor. 
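The entries above trace the VMware-backed destroy path for instance 7554b5e2: power off via VirtualMachine.PowerOffVM_Task, unregister the VM, then delete its directory with FileManager.DeleteDatastoreFile_Task, with each long-running call waited on as a vCenter task. Below is a minimal, illustrative sketch of that ordering only; the `session.call`/`session.wait` helpers are hypothetical stand-ins for the oslo.vmware session and task-wait machinery named in the log, not the actual nova.virt.vmwareapi code.

```python
# Illustrative sketch of the ordering seen in the log:
# PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task.
# The `session` interface is a hypothetical stand-in, NOT the real oslo.vmware API.

class FakeSession:
    """Tiny stub so the sketch can be exercised without a vCenter."""

    def call(self, obj, method, **kwargs):
        print(f"invoke {obj}.{method} {kwargs or ''}")
        return f"task-for-{method}"

    def wait(self, task):
        print(f"wait for {task} to complete")


def destroy_instance(session, vm_ref, datastore_path):
    # 1. Power off the VM (VirtualMachine.PowerOffVM_Task in the log).
    task = session.call(vm_ref, "PowerOffVM_Task")
    session.wait(task)

    # 2. Unregister the VM from the inventory (VirtualMachine.UnregisterVM);
    #    this call is synchronous in vSphere, so there is no task to wait on.
    session.call(vm_ref, "UnregisterVM")

    # 3. Delete the instance contents from the datastore
    #    (FileManager.DeleteDatastoreFile_Task).
    task = session.call("FileManager", "DeleteDatastoreFile_Task",
                        name=datastore_path)
    session.wait(task)


if __name__ == "__main__":
    destroy_instance(FakeSession(), "vm-ref-7554b5e2",
                     "[datastore2] 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7")
```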
[ 1050.508604] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.509320] env[69927]: DEBUG nova.compute.manager [-] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.509320] env[69927]: DEBUG nova.network.neutron [-] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.512910] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1050.512910] env[69927]: value = "task-4096387" [ 1050.512910] env[69927]: _type = "Task" [ 1050.512910] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.526511] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096387, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.634441] env[69927]: DEBUG oslo_vmware.api [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Task: {'id': task-4096384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.373898} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.634750] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.634934] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.635131] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.635308] env[69927]: INFO nova.compute.manager [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1050.635641] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.636324] env[69927]: DEBUG nova.compute.manager [-] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.636324] env[69927]: DEBUG nova.network.neutron [-] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.642628] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096385, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.647862] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df460fe6-20c3-40ba-96b7-3e8508704a83 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.655840] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bb2e5e-d06f-4e3e-9acc-ddcf7f65d7b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.689214] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8721eaab-a6d8-432b-ba73-b24e67eebd0a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.701646] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efe171c-ccf7-4b81-87a3-299d1c7ef03f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.725659] env[69927]: DEBUG nova.compute.provider_tree [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.938597] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096381, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.953091] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081971} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.953091] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.953669] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a31a253-6230-4899-930d-d79f02b0c7a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.989182] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 20ac32b7-51fc-40bf-a667-2aeb6c8c7648/20ac32b7-51fc-40bf-a667-2aeb6c8c7648.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.989610] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-918f9e51-b5c3-4dc8-b08c-4cb80ecff20a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.012831] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1051.012831] env[69927]: value = "task-4096388" [ 1051.012831] env[69927]: _type = "Task" [ 1051.012831] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.034765] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096388, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.035116] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096387, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.039069] env[69927]: DEBUG nova.network.neutron [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Updated VIF entry in instance network info cache for port ace9865c-a7d5-41ad-abee-d70d3aa00dc0. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1051.039460] env[69927]: DEBUG nova.network.neutron [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Updating instance_info_cache with network_info: [{"id": "ace9865c-a7d5-41ad-abee-d70d3aa00dc0", "address": "fa:16:3e:99:0a:35", "network": {"id": "93ea9667-6d8f-46ae-8c4a-af58996dbb46", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1534634422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babf071cfc564f7d83c28d449c774840", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapace9865c-a7", "ovs_interfaceid": "ace9865c-a7d5-41ad-abee-d70d3aa00dc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.138914] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096385, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.178170] env[69927]: DEBUG nova.compute.manager [req-3f7d90a0-6278-462a-bab0-87f3bc9a3cee req-f0d4de70-acf4-470c-96d6-886c8b81865f service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Received event network-vif-deleted-afbb315a-bc71-453c-9b30-ef008a6e400e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.178170] env[69927]: INFO nova.compute.manager [req-3f7d90a0-6278-462a-bab0-87f3bc9a3cee req-f0d4de70-acf4-470c-96d6-886c8b81865f service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Neutron deleted interface afbb315a-bc71-453c-9b30-ef008a6e400e; detaching it from the instance and deleting it from the info cache [ 1051.178170] env[69927]: DEBUG nova.network.neutron [req-3f7d90a0-6278-462a-bab0-87f3bc9a3cee req-f0d4de70-acf4-470c-96d6-886c8b81865f service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.229725] env[69927]: DEBUG nova.scheduler.client.report [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.318943] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.319441] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.319606] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.319688] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.319864] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.322202] env[69927]: INFO nova.compute.manager [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Terminating instance [ 1051.437426] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096381, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.526861] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096388, 'name': ReconfigVM_Task, 'duration_secs': 0.416952} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.526861] env[69927]: DEBUG nova.network.neutron [-] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.528333] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 20ac32b7-51fc-40bf-a667-2aeb6c8c7648/20ac32b7-51fc-40bf-a667-2aeb6c8c7648.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.544180] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55e28dd2-3cd4-43e3-a0a8-7197baff7f99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.544654] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096387, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.545060] env[69927]: DEBUG oslo_concurrency.lockutils [req-1ee79e33-6a94-4f42-9852-17fd3f3b8804 req-59fdbae3-e637-47ed-b95e-bb5e33c09086 service nova] Releasing lock "refresh_cache-0f5643d4-52f3-4cba-b71b-9c4370175e35" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.554022] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1051.554022] env[69927]: value = "task-4096389" [ 1051.554022] env[69927]: _type = "Task" [ 1051.554022] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.567111] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096389, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.639552] env[69927]: DEBUG oslo_vmware.api [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096385, 'name': PowerOnVM_Task, 'duration_secs': 1.238277} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.639870] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1051.640138] env[69927]: INFO nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Took 10.20 seconds to spawn the instance on the hypervisor. 
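The recurring `Task: {'id': ..., 'name': ...} progress is N%` entries come from oslo.vmware polling each vCenter task until it finishes, at which point the `completed successfully` line with `duration_secs` is logged. The sketch below is a rough approximation of that poll-until-done loop under an assumed `get_task_info()` accessor; it mirrors the pattern behind `_poll_task`/`wait_for_task` rather than reproducing the library's implementation.

```python
import time

# Rough approximation of the loop behind the repeated
# "progress is N%" / "completed successfully" log entries.
# `get_task_info` is a hypothetical accessor standing in for the
# task property reads oslo.vmware performs against vCenter.

POLL_INTERVAL = 0.5  # seconds between polls (value chosen for the sketch)


def wait_for_task(get_task_info, task_id):
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 14}
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task {task_id} completed successfully "
                  f"(duration_secs={duration:.6f})")
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)


if __name__ == "__main__":
    # Dry run with canned task states instead of a live vCenter.
    states = iter([
        {"state": "running", "progress": 0},
        {"state": "running", "progress": 66},
        {"state": "success", "result": "vm-powered-on"},
    ])
    wait_for_task(lambda _task_id: next(states), "task-4096385")
```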
[ 1051.640425] env[69927]: DEBUG nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1051.641294] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0cb03a-c33f-49fb-acdd-371a316c1d69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.682559] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e229731-0ee2-480c-a000-f7298e4d1a1d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.694371] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e8ad65-84e7-417a-9b6b-8f20e28c80bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.709285] env[69927]: DEBUG nova.network.neutron [-] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.733765] env[69927]: DEBUG nova.compute.manager [req-3f7d90a0-6278-462a-bab0-87f3bc9a3cee req-f0d4de70-acf4-470c-96d6-886c8b81865f service nova] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Detach interface failed, port_id=afbb315a-bc71-453c-9b30-ef008a6e400e, reason: Instance c3a531fd-647c-43b6-9d3d-fc6ecbc2445e could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1051.738356] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.741264] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.816s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.741741] env[69927]: DEBUG nova.objects.instance [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'pci_requests' on Instance uuid a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.763746] env[69927]: INFO nova.scheduler.client.report [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Deleted allocations for instance 93d19a66-f00e-4fa8-9eed-32035b020ba2 [ 1051.826973] env[69927]: DEBUG nova.compute.manager [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] 
[instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1051.827243] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.828413] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc01bb7-bf5d-4d76-a61d-8cd480f91432 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.836730] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.836992] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54c31f34-b330-4100-ae6c-0d92ab62f204 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.845075] env[69927]: DEBUG oslo_vmware.api [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1051.845075] env[69927]: value = "task-4096390" [ 1051.845075] env[69927]: _type = "Task" [ 1051.845075] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.853242] env[69927]: DEBUG oslo_vmware.api [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.936052] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096381, 'name': CreateVM_Task, 'duration_secs': 1.671821} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.936285] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1051.936881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.937148] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.937561] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1051.937839] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-446b2921-b4aa-47a8-a9fe-e90e4ac9676d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.944228] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1051.944228] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e73644-eeb8-6935-4624-85014dc3d5eb" [ 1051.944228] env[69927]: _type = "Task" [ 1051.944228] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.953975] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e73644-eeb8-6935-4624-85014dc3d5eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.030055] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096387, 'name': CreateSnapshot_Task, 'duration_secs': 1.199506} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.030276] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1052.031052] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4dbe40-6b02-4d31-b089-9ef1caa41db9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.042068] env[69927]: INFO nova.compute.manager [-] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Took 1.41 seconds to deallocate network for instance. [ 1052.064533] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096389, 'name': Rename_Task, 'duration_secs': 0.193912} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.064533] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.064750] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24bd5ae9-0ff1-4da4-905b-b4283e8c1823 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.073262] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1052.073262] env[69927]: value = "task-4096391" [ 1052.073262] env[69927]: _type = "Task" [ 1052.073262] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.082710] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.163297] env[69927]: INFO nova.compute.manager [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Took 47.67 seconds to build instance. [ 1052.214754] env[69927]: INFO nova.compute.manager [-] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Took 1.70 seconds to deallocate network for instance. 
[ 1052.246212] env[69927]: DEBUG nova.objects.instance [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'numa_topology' on Instance uuid a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.273623] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a4e57c96-b9f5-437b-bdda-ff554412a6e1 tempest-ServersTestMultiNic-1807665099 tempest-ServersTestMultiNic-1807665099-project-member] Lock "93d19a66-f00e-4fa8-9eed-32035b020ba2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.292s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.312525] env[69927]: DEBUG nova.compute.manager [req-0d47e0da-c8fa-44fa-9699-aa273f212cc0 req-20d3a788-efab-4467-8353-e6ca72a737bd service nova] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Received event network-vif-deleted-f0303e15-89fc-4eb7-825c-9b0cea4b5718 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.355942] env[69927]: DEBUG oslo_vmware.api [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.454881] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e73644-eeb8-6935-4624-85014dc3d5eb, 'name': SearchDatastore_Task, 'duration_secs': 0.037998} completed successfully.
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.455384] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.456021] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.456021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.456021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.456201] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.456454] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-075922d1-c2e0-4da2-97a1-3acafd17529d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.467672] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.468064] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1052.468639] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58e5a4f1-30d1-487a-b4b6-3670a9508ebe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.475356] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1052.475356] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52120911-3a97-d3fa-e0d4-a7a53cbd2751" [ 1052.475356] env[69927]: _type = "Task" [ 1052.475356] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.484343] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52120911-3a97-d3fa-e0d4-a7a53cbd2751, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.552374] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1052.553605] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.553926] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3dc544c8-9c0a-432e-84cf-30c38981137f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.563853] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1052.563853] env[69927]: value = "task-4096392" [ 1052.563853] env[69927]: _type = "Task" [ 1052.563853] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.577721] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096392, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.587362] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096391, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.665513] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9a022410-2994-48f2-8bfe-c824da649e23 tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 49.202s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.724297] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.749270] env[69927]: INFO nova.compute.claims [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.858134] env[69927]: DEBUG oslo_vmware.api [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096390, 'name': PowerOffVM_Task, 'duration_secs': 0.920972} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.858486] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.858666] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.858933] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07b970fc-6fc1-419b-9a3a-a69bc6b8cf70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.943358] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.943599] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.943835] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-524a524d-df64-487e-b0e1-df35051a6496 
tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleting the datastore file [datastore2] a9a62523-50fb-44b2-bfc8-9c6664dbf050 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.944081] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-423b1fbe-3a45-49ae-96aa-c078843cf67e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.952673] env[69927]: DEBUG oslo_vmware.api [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1052.952673] env[69927]: value = "task-4096394" [ 1052.952673] env[69927]: _type = "Task" [ 1052.952673] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.963841] env[69927]: DEBUG oslo_vmware.api [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096394, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.986727] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52120911-3a97-d3fa-e0d4-a7a53cbd2751, 'name': SearchDatastore_Task, 'duration_secs': 0.01751} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.987993] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff29883f-3ff1-4297-8dd2-7c9b8af651cb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.995702] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1052.995702] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5223debd-82db-a653-af39-a7bc6e3f3869" [ 1052.995702] env[69927]: _type = "Task" [ 1052.995702] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.006528] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5223debd-82db-a653-af39-a7bc6e3f3869, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.079187] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096392, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.090495] env[69927]: DEBUG oslo_vmware.api [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096391, 'name': PowerOnVM_Task, 'duration_secs': 0.941153} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.090773] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.091040] env[69927]: INFO nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Took 8.94 seconds to spawn the instance on the hypervisor. [ 1053.091218] env[69927]: DEBUG nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1053.092471] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82ede12-b95f-465d-83f0-a82c3afbdc34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.464174] env[69927]: DEBUG oslo_vmware.api [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096394, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192253} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.464472] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1053.464746] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1053.464830] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1053.465040] env[69927]: INFO nova.compute.manager [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 1053.465328] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.465579] env[69927]: DEBUG nova.compute.manager [-] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1053.465701] env[69927]: DEBUG nova.network.neutron [-] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1053.510442] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5223debd-82db-a653-af39-a7bc6e3f3869, 'name': SearchDatastore_Task, 'duration_secs': 0.013171} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.510753] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.511030] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 0f5643d4-52f3-4cba-b71b-9c4370175e35/0f5643d4-52f3-4cba-b71b-9c4370175e35.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1053.511311] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e040e7ef-5989-4de8-b773-ee2f4abe5b40 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.519306] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1053.519306] env[69927]: value = "task-4096395" [ 1053.519306] env[69927]: _type = "Task" [ 1053.519306] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.530137] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096395, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.556880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.557471] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.557770] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.558052] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.558292] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.560976] env[69927]: INFO nova.compute.manager [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Terminating instance [ 1053.579470] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096392, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.616037] env[69927]: INFO nova.compute.manager [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Took 47.83 seconds to build instance. 
[ 1054.033665] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096395, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.075616] env[69927]: DEBUG nova.compute.manager [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1054.075797] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.076876] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245349ef-a6f1-4ee0-be81-0807417054b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.092219] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096392, 'name': CloneVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.095999] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1054.096101] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dad8501a-3635-442c-a48e-9ecefdd24efd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.108352] env[69927]: DEBUG oslo_vmware.api [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1054.108352] env[69927]: value = "task-4096396" [ 1054.108352] env[69927]: _type = "Task" [ 1054.108352] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.124906] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6c3bb3c-9452-4aa3-9f4d-693ce47d7ed2 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 50.429s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.129382] env[69927]: DEBUG oslo_vmware.api [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096396, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.204516] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e39ef04-4ca7-4cae-adc2-9248f2471286 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.213304] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2167001-9381-4890-9aea-505594aff843 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.252066] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f51a576-3635-4419-8793-429aefd3a0a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.262163] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e32afd2-1c58-4d33-a577-4a3fe8f23458 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.282397] env[69927]: DEBUG nova.compute.provider_tree [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.376470] env[69927]: DEBUG nova.network.neutron [-] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.486748] env[69927]: DEBUG nova.compute.manager [req-db13888f-feaa-4a71-affd-6858cd0c4a69 req-0bafe825-75d2-43c0-8549-dae017aca319 service nova] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Received event network-vif-deleted-4b7f5764-23c4-40f8-a618-0c608e0e987d {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1054.532613] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564465} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.532947] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 0f5643d4-52f3-4cba-b71b-9c4370175e35/0f5643d4-52f3-4cba-b71b-9c4370175e35.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1054.533218] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.533514] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1bda8dc1-06da-47a6-8580-af44cbd38639 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.544242] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1054.544242] env[69927]: value = "task-4096397" [ 1054.544242] env[69927]: _type = "Task" [ 1054.544242] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.555250] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096397, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.576907] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096392, 'name': CloneVM_Task, 'duration_secs': 1.886263} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.577300] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Created linked-clone VM from snapshot [ 1054.578017] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360a99ef-f904-4bbd-bb2c-cdba20115ce8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.589014] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Uploading image f57cf18a-f065-4e7f-a53c-8cc53c2f655f {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1054.620948] env[69927]: DEBUG oslo_vmware.api [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096396, 'name': PowerOffVM_Task, 'duration_secs': 0.246937} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.622115] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1054.623055] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1054.623055] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2cd6f362-ac2f-49cf-b4ea-5e6171f7dbd1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.638777] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1054.638777] env[69927]: value = "vm-811520" [ 1054.638777] env[69927]: _type = "VirtualMachine" [ 1054.638777] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1054.639210] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-21f9df81-69c2-4110-9a05-790eb0a685ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.652300] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease: (returnval){ [ 1054.652300] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523c34da-aaa8-579c-c669-850eac0c028b" [ 1054.652300] env[69927]: _type = "HttpNfcLease" [ 1054.652300] env[69927]: } obtained for exporting VM: (result){ [ 1054.652300] env[69927]: value = "vm-811520" [ 1054.652300] env[69927]: _type = "VirtualMachine" [ 1054.652300] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1054.652734] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the lease: (returnval){ [ 1054.652734] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523c34da-aaa8-579c-c669-850eac0c028b" [ 1054.652734] env[69927]: _type = "HttpNfcLease" [ 1054.652734] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1054.666089] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1054.666089] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523c34da-aaa8-579c-c669-850eac0c028b" [ 1054.666089] env[69927]: _type = "HttpNfcLease" [ 1054.666089] env[69927]: } is initializing. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1054.732175] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1054.732531] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1054.733068] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Deleting the datastore file [datastore2] 3936a3db-4afa-4a37-9d63-8c18b6b72c72 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.733407] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43503c11-4607-462b-a486-92fd8be2007e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.741457] env[69927]: DEBUG oslo_vmware.api [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for the task: (returnval){ [ 1054.741457] env[69927]: value = "task-4096400" [ 1054.741457] env[69927]: _type = "Task" [ 1054.741457] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.751831] env[69927]: DEBUG oslo_vmware.api [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096400, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.786562] env[69927]: DEBUG nova.scheduler.client.report [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.881359] env[69927]: INFO nova.compute.manager [-] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Took 1.42 seconds to deallocate network for instance. 
[ 1055.057485] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096397, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08235} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.057814] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1055.058724] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71e4c4f-a42b-489c-bf90-81cc9ccd7c3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.087391] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 0f5643d4-52f3-4cba-b71b-9c4370175e35/0f5643d4-52f3-4cba-b71b-9c4370175e35.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1055.087908] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20c7351d-c579-4efa-a747-649aa8bef3ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.110331] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1055.110331] env[69927]: value = "task-4096401" [ 1055.110331] env[69927]: _type = "Task" [ 1055.110331] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.119803] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096401, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.165300] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1055.165300] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523c34da-aaa8-579c-c669-850eac0c028b" [ 1055.165300] env[69927]: _type = "HttpNfcLease" [ 1055.165300] env[69927]: } is ready. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1055.165640] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1055.165640] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523c34da-aaa8-579c-c669-850eac0c028b" [ 1055.165640] env[69927]: _type = "HttpNfcLease" [ 1055.165640] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1055.166520] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed17caca-dffe-456d-89fb-5625ae3ad031 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.175806] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d7342f-e51a-9a5e-2620-ced9a2da660f/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1055.176125] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d7342f-e51a-9a5e-2620-ced9a2da660f/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1055.253598] env[69927]: DEBUG oslo_vmware.api [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096400, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.287308] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7aecc997-d9f6-4acc-a5fa-554ee9aedbfb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.293400] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.552s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.298845] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.608s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.301595] env[69927]: INFO nova.compute.claims [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.390471] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.407675] env[69927]: INFO nova.network.neutron [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1055.620360] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096401, 'name': ReconfigVM_Task, 'duration_secs': 0.494977} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.620616] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 0f5643d4-52f3-4cba-b71b-9c4370175e35/0f5643d4-52f3-4cba-b71b-9c4370175e35.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.621321] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b50a4eb-4ba1-403c-bebe-6ca97c8edcc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.629594] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1055.629594] env[69927]: value = "task-4096402" [ 1055.629594] env[69927]: _type = "Task" [ 1055.629594] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.640614] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096402, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.752902] env[69927]: DEBUG oslo_vmware.api [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Task: {'id': task-4096400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.544916} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.753459] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1055.753618] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1055.753872] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1055.754153] env[69927]: INFO nova.compute.manager [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1055.754442] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1055.754782] env[69927]: DEBUG nova.compute.manager [-] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1055.754910] env[69927]: DEBUG nova.network.neutron [-] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.143036] env[69927]: DEBUG nova.compute.manager [req-c52b97d2-2e66-4639-b2ae-f86e7d2d5312 req-bdf43980-e67a-4344-acdd-cc65c9e8119d service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Received event network-vif-deleted-62623986-fc5c-4de6-943f-7bdd5b400e04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.147822] env[69927]: INFO nova.compute.manager [req-c52b97d2-2e66-4639-b2ae-f86e7d2d5312 req-bdf43980-e67a-4344-acdd-cc65c9e8119d service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Neutron deleted interface 62623986-fc5c-4de6-943f-7bdd5b400e04; detaching it from the instance and deleting it from the info cache [ 1056.147822] env[69927]: DEBUG nova.network.neutron [req-c52b97d2-2e66-4639-b2ae-f86e7d2d5312 req-bdf43980-e67a-4344-acdd-cc65c9e8119d service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.157021] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096402, 'name': Rename_Task, 'duration_secs': 0.170695} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.157021] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1056.157021] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01a9de8f-9695-414d-8f96-a88efa392f02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.168154] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1056.168154] env[69927]: value = "task-4096403" [ 1056.168154] env[69927]: _type = "Task" [ 1056.168154] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.180505] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096403, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.546668] env[69927]: DEBUG nova.network.neutron [-] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.596614] env[69927]: DEBUG nova.compute.manager [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-changed-31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.596614] env[69927]: DEBUG nova.compute.manager [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing instance network info cache due to event network-changed-31239db7-86bd-4d24-b54f-414bd1d5a3d1. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1056.596877] env[69927]: DEBUG oslo_concurrency.lockutils [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.597144] env[69927]: DEBUG oslo_concurrency.lockutils [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.597385] env[69927]: DEBUG nova.network.neutron [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing network info cache for port 31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1056.649480] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9beafe30-6613-4134-af95-c16911cef55d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.663201] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1056.663928] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1056.671970] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d501919-a213-4ac9-bd7e-5da661c1cd44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.708635] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096403, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.726369] env[69927]: DEBUG nova.compute.manager [req-c52b97d2-2e66-4639-b2ae-f86e7d2d5312 req-bdf43980-e67a-4344-acdd-cc65c9e8119d service nova] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Detach interface failed, port_id=62623986-fc5c-4de6-943f-7bdd5b400e04, reason: Instance 3936a3db-4afa-4a37-9d63-8c18b6b72c72 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1056.800873] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb48f624-16c8-4987-8d5b-575cc9cfb2fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.810542] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc17ebd8-87e1-4262-9538-6f89d4a61423 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.848108] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a241e7-7a93-43c4-8c7b-f115affc098b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.855269] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9702020f-fefe-4b41-b56d-1bdfe845048e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.871811] env[69927]: DEBUG nova.compute.provider_tree [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.053179] env[69927]: INFO nova.compute.manager [-] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Took 1.30 seconds to deallocate network for instance. 
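The task records above (Invoking VirtualMachine.PowerOnVM_Task, "Waiting for the task", "progress is 0% ... 100%") follow the standard oslo.vmware pattern: invoke a *_Task SOAP method through the session, then block on wait_for_task(), which repeatedly reads the task's progress and emits the _poll_task DEBUG lines. The sketch below is illustrative only, not part of the captured log; the vCenter host, credentials, and the 'vm-12345' moref are placeholders, while the oslo.vmware calls themselves (VMwareAPISession, invoke_api, get_moref, wait_for_task) are the real public API.

```python
# Minimal sketch of starting and polling a vSphere task via oslo.vmware.
# Host, credentials and the moref value are placeholders, not from the log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test',              # placeholder vCenter host
    'administrator@vsphere.local',  # placeholder user
    'secret',                       # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)         # the poll cadence behind the progress lines

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref

# Start the task; the SOAP call is what appears as
# "Invoking VirtualMachine.PowerOnVM_Task" in the log.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Block until the task reaches SUCCESS (or raise on error). Each poll
# iteration corresponds to one "_poll_task ... progress is N%" record.
session.wait_for_task(task)
```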
[ 1057.202041] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.202901] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.203108] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.203368] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.203487] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.203636] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.203775] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1057.204341] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.210284] env[69927]: DEBUG oslo_vmware.api [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096403, 'name': PowerOnVM_Task, 'duration_secs': 0.55031} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.211121] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.211403] env[69927]: INFO nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Took 10.48 seconds to spawn the instance on the hypervisor. 
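The burst of "Running periodic task ComputeManager._poll_*" records above, including the "_reclaim_queued_deletes ... skipping" case, comes from oslo.service's periodic-task machinery: methods decorated with @periodic_task on a PeriodicTasks subclass are collected at class creation and driven by run_periodic_tasks(). The sketch below is a hypothetical manager used only to show that shape; ExampleManager and its task bodies are invented, and only the decorator/base-class usage reflects the actual oslo.service API.

```python
# Illustrative sketch of the periodic-task pattern behind the log records.
# ExampleManager is hypothetical; the oslo.service API usage is real.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class ExampleManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _poll_something(self, context):
        # Each invocation is logged by oslo.service as
        # "Running periodic task ExampleManager._poll_something".
        pass

    @periodic_task.periodic_task(spacing=-1)
    def _disabled_task(self, context):
        # A negative spacing disables the task; oslo.service logs that it is
        # skipping the task instead of scheduling it.
        pass


mgr = ExampleManager()
mgr.run_periodic_tasks(context=None)  # one pass over all currently-due tasks
```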
[ 1057.211595] env[69927]: DEBUG nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1057.212687] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394bd32c-859d-4398-8f73-6a10bc340a49 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.327923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.327923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.327923] env[69927]: DEBUG nova.network.neutron [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1057.375346] env[69927]: DEBUG nova.scheduler.client.report [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.564216] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.713157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.738912] env[69927]: INFO nova.compute.manager [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 
0f5643d4-52f3-4cba-b71b-9c4370175e35] Took 47.78 seconds to build instance. [ 1057.743619] env[69927]: DEBUG nova.network.neutron [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updated VIF entry in instance network info cache for port 31239db7-86bd-4d24-b54f-414bd1d5a3d1. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.744031] env[69927]: DEBUG nova.network.neutron [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.886738] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.886738] env[69927]: DEBUG nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1057.888779] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.491s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.889179] env[69927]: DEBUG nova.objects.instance [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'resources' on Instance uuid 9c00e485-fd59-4571-abd5-80ca5e3bac1b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.173989] env[69927]: DEBUG nova.network.neutron [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699de5cd-28", "ovs_interfaceid": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.241447] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08800c93-e07f-444a-8212-9b506e7758f4 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.294s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.250030] env[69927]: DEBUG oslo_concurrency.lockutils [req-a2c329dd-c7f4-4b67-9bd5-9134b2922fd2 req-f0fed008-50f6-444b-acfd-6b61b614fdb8 service nova] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.290871] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 
tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "0f5643d4-52f3-4cba-b71b-9c4370175e35" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.291360] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.291519] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "0f5643d4-52f3-4cba-b71b-9c4370175e35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.291605] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.291786] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.296024] env[69927]: INFO nova.compute.manager [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Terminating instance [ 1058.392251] env[69927]: DEBUG nova.compute.utils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1058.393667] env[69927]: DEBUG nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1058.393844] env[69927]: DEBUG nova.network.neutron [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1058.397895] env[69927]: DEBUG nova.objects.instance [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'numa_topology' on Instance uuid 9c00e485-fd59-4571-abd5-80ca5e3bac1b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.447607] env[69927]: DEBUG nova.policy [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0389aa7508e4f9e9ffc40ce16370132', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8d14934e39f45f3b8e9542d2dbc5c8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1058.627024] env[69927]: DEBUG nova.compute.manager [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-vif-plugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.627216] env[69927]: DEBUG oslo_concurrency.lockutils [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.627473] env[69927]: DEBUG oslo_concurrency.lockutils [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.627614] env[69927]: DEBUG oslo_concurrency.lockutils [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.627778] env[69927]: DEBUG nova.compute.manager [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] No waiting events found dispatching network-vif-plugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1058.627941] env[69927]: 
WARNING nova.compute.manager [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received unexpected event network-vif-plugged-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 for instance with vm_state shelved_offloaded and task_state spawning. [ 1058.628428] env[69927]: DEBUG nova.compute.manager [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.628655] env[69927]: DEBUG nova.compute.manager [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing instance network info cache due to event network-changed-699de5cd-28f8-453d-8f0a-7856f2d6a2b0. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1058.628865] env[69927]: DEBUG oslo_concurrency.lockutils [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] Acquiring lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.677671] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.680976] env[69927]: DEBUG oslo_concurrency.lockutils [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] Acquired lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.681204] env[69927]: DEBUG nova.network.neutron [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Refreshing network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.729761] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e39ede03810624a54772191fa456fcb4',container_format='bare',created_at=2025-05-13T19:41:46Z,direct_url=,disk_format='vmdk',id=cb041012-44ce-40ce-ba24-60376d2f1762,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-264988809-shelved',owner='3b7ae5270b0643e6b5720d4f2f765d74',properties=ImageMetaProps,protected=,size=31670784,status='active',tags=,updated_at=2025-05-13T19:41:59Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 1058.730636] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.731088] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1058.731293] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.731657] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1058.731805] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1058.732130] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1058.732241] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1058.732807] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1058.733016] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1058.733208] env[69927]: DEBUG nova.virt.hardware [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1058.734847] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af56ef3-5f7e-468d-a968-cabb730e51bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.745976] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73633624-931b-49fe-853e-3d240798386d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.766031] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:8d:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '510d3c47-3615-43d5-aa5d-a279fd915e71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '699de5cd-28f8-453d-8f0a-7856f2d6a2b0', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1058.773995] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1058.774959] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1058.775328] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0fb2127-8c98-45c6-85fc-f397dcfdb975 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.797836] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1058.797836] env[69927]: value = "task-4096404" [ 1058.797836] env[69927]: _type = "Task" [ 1058.797836] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.801822] env[69927]: DEBUG nova.compute.manager [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.802050] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.802968] env[69927]: DEBUG nova.network.neutron [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Successfully created port: 28547515-b928-4d6f-8849-dd2430f73245 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.805344] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2291869f-d33f-450b-a0ba-bb3179633967 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.815668] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.819874] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a818cd89-bc69-4ce6-a52f-3d75cc093bdd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.821506] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096404, 'name': CreateVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.828124] env[69927]: DEBUG oslo_vmware.api [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1058.828124] env[69927]: value = "task-4096405" [ 1058.828124] env[69927]: _type = "Task" [ 1058.828124] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.846468] env[69927]: DEBUG oslo_vmware.api [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096405, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.900179] env[69927]: DEBUG nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1058.903956] env[69927]: DEBUG nova.objects.base [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Object Instance<9c00e485-fd59-4571-abd5-80ca5e3bac1b> lazy-loaded attributes: resources,numa_topology {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1059.315851] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096404, 'name': CreateVM_Task, 'duration_secs': 0.445081} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.316417] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1059.316961] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.316961] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.317432] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1059.317520] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0f94dc7-52e0-4b74-b70e-3cad126ccf62 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.326279] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1059.326279] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52200322-1618-ddb1-ab45-ab9e7965f821" [ 1059.326279] env[69927]: _type = "Task" [ 1059.326279] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.339249] env[69927]: DEBUG oslo_vmware.api [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096405, 'name': PowerOffVM_Task, 'duration_secs': 0.215358} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.345082] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.345628] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.345628] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52200322-1618-ddb1-ab45-ab9e7965f821, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.346107] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dd91345-ece2-4daf-9e40-ac2d61c851b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.363765] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe9f943-b01d-4ec0-b74d-e4a54020a6d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.379305] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c391f8-a8f1-4bc5-bab0-8c3cd510dd82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.422220] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cc3d15-f7f0-4d46-b31c-df48b4608810 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.430184] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.430184] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.430184] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleting the datastore file [datastore2] 0f5643d4-52f3-4cba-b71b-9c4370175e35 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1059.430184] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ca9dbe6-36c0-4bc1-ad58-905020db2387 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.435141] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b30721-a657-418c-86a5-46f8d0619b05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.440476] env[69927]: DEBUG oslo_vmware.api [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for the task: (returnval){ [ 1059.440476] env[69927]: value = "task-4096407" [ 1059.440476] env[69927]: _type = "Task" [ 1059.440476] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.452823] env[69927]: DEBUG nova.compute.provider_tree [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.460407] env[69927]: DEBUG oslo_vmware.api [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096407, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.727670] env[69927]: DEBUG nova.network.neutron [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updated VIF entry in instance network info cache for port 699de5cd-28f8-453d-8f0a-7856f2d6a2b0. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.728107] env[69927]: DEBUG nova.network.neutron [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [{"id": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "address": "fa:16:3e:87:8d:a8", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699de5cd-28", "ovs_interfaceid": "699de5cd-28f8-453d-8f0a-7856f2d6a2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.839487] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.840193] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Processing image cb041012-44ce-40ce-ba24-60376d2f1762 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1059.841992] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762/cb041012-44ce-40ce-ba24-60376d2f1762.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.841992] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762/cb041012-44ce-40ce-ba24-60376d2f1762.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.842183] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d 
tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1059.843838] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73947e53-4b75-4246-821c-533e45418b81 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.858031] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1059.858240] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1059.859385] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07166a54-2176-4f80-a6f9-a3589ce329fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.865787] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1059.865787] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f66dcb-1dce-0dad-5b19-f283f108c115" [ 1059.865787] env[69927]: _type = "Task" [ 1059.865787] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.875272] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f66dcb-1dce-0dad-5b19-f283f108c115, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.929732] env[69927]: DEBUG nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1059.954769] env[69927]: DEBUG oslo_vmware.api [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Task: {'id': task-4096407, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.409779} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1059.960167] env[69927]: DEBUG nova.virt.hardware [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1059.960167] env[69927]: DEBUG nova.scheduler.client.report [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1059.963901] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.964117] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.964337] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.964457] env[69927]: INFO nova.compute.manager [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Took 1.16 seconds to destroy the instance on the hypervisor. 
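The FileManager.DeleteDatastoreFile_Task / wait_for_task sequence traced above is the standard oslo.vmware pattern of invoking a vSphere task method through the API session and polling it to completion. The sketch below is a minimal, hypothetical reconstruction of that pattern only; the vCenter host, credentials, datastore path and dc_ref are placeholders, not values from this log, and this is not Nova's actual code path.

```python
# Minimal sketch of the invoke-and-poll pattern seen in the log, using oslo.vmware.
# All concrete values (vCenter host, credentials, datastore path, dc_ref) are
# placeholders for illustration only.
from oslo_vmware import api

session = api.VMwareAPISession('vc.example.test', 'admin', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

file_manager = session.vim.service_content.fileManager
dc_ref = None  # a real caller passes the Datacenter managed object reference

# FileManager.DeleteDatastoreFile_Task returns a Task moref ...
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          file_manager,
                          name='[datastore2] <instance-dir>',
                          datacenter=dc_ref)

# ... which wait_for_task() polls until it reports success or raises on error,
# matching the "progress is 0%" / "completed successfully" lines above.
session.wait_for_task(task)
```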
[ 1059.964699] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.965677] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69583784-7ecb-4bd0-bf86-55229e6712a4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.968491] env[69927]: DEBUG nova.compute.manager [-] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.968591] env[69927]: DEBUG nova.network.neutron [-] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.977950] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68995ee-bf85-4e7a-9d15-142f20ee17bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.233832] env[69927]: DEBUG oslo_concurrency.lockutils [req-0cbef104-7766-46b2-85b1-4f8635a55976 req-8419aa32-812e-414a-a2e6-2b325172559d service nova] Releasing lock "refresh_cache-a2b1684f-82af-42fc-925e-db36f31cfe63" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.379572] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Preparing fetch location {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1060.379866] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Fetch image to [datastore1] OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312/OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312.vmdk {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1060.380082] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Downloading stream optimized image cb041012-44ce-40ce-ba24-60376d2f1762 to [datastore1] OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312/OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312.vmdk on the data store datastore1 as vApp {{(pid=69927) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1060.380314] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Downloading image file data cb041012-44ce-40ce-ba24-60376d2f1762 
to the ESX as VM named 'OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312' {{(pid=69927) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1060.392423] env[69927]: DEBUG nova.compute.manager [req-3939e436-ec96-4975-a027-d0053489ca77 req-8e68bded-aa76-44cb-866f-69244932888a service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Received event network-vif-plugged-28547515-b928-4d6f-8849-dd2430f73245 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.393318] env[69927]: DEBUG oslo_concurrency.lockutils [req-3939e436-ec96-4975-a027-d0053489ca77 req-8e68bded-aa76-44cb-866f-69244932888a service nova] Acquiring lock "c6f166c7-538f-4c8a-9500-48319c694ea0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.393318] env[69927]: DEBUG oslo_concurrency.lockutils [req-3939e436-ec96-4975-a027-d0053489ca77 req-8e68bded-aa76-44cb-866f-69244932888a service nova] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.393318] env[69927]: DEBUG oslo_concurrency.lockutils [req-3939e436-ec96-4975-a027-d0053489ca77 req-8e68bded-aa76-44cb-866f-69244932888a service nova] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.393903] env[69927]: DEBUG nova.compute.manager [req-3939e436-ec96-4975-a027-d0053489ca77 req-8e68bded-aa76-44cb-866f-69244932888a service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] No waiting events found dispatching network-vif-plugged-28547515-b928-4d6f-8849-dd2430f73245 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1060.393903] env[69927]: WARNING nova.compute.manager [req-3939e436-ec96-4975-a027-d0053489ca77 req-8e68bded-aa76-44cb-866f-69244932888a service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Received unexpected event network-vif-plugged-28547515-b928-4d6f-8849-dd2430f73245 for instance with vm_state building and task_state spawning. 
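The Acquiring/acquired/"released" lock lines that run through this trace come from oslo.concurrency's lockutils, used both as a decorator and as a context manager. A rough illustration follows; the lock names are copied from the log, the function bodies are placeholders, and Nova's own wrappers add extra behaviour not shown here.

```python
from oslo_concurrency import lockutils

# Decorator form: serialize callers on a named in-process lock, which is what
# produces the 'acquired by ... :: waited N.NNNs' / '"released" ... :: held N.NNNs'
# lines in the log.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder body

# Context-manager form, as used for the per-instance network cache refreshes.
def refresh(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder body
```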
[ 1060.465285] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.576s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.467785] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 29.161s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.481255] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1060.481255] env[69927]: value = "resgroup-9" [ 1060.481255] env[69927]: _type = "ResourcePool" [ 1060.481255] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1060.486024] env[69927]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f18a20cb-e27b-4eff-b791-bfaacf40090e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.510992] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease: (returnval){ [ 1060.510992] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a87e5e-21e1-74d2-2bcd-df32210469d1" [ 1060.510992] env[69927]: _type = "HttpNfcLease" [ 1060.510992] env[69927]: } obtained for vApp import into resource pool (val){ [ 1060.510992] env[69927]: value = "resgroup-9" [ 1060.510992] env[69927]: _type = "ResourcePool" [ 1060.510992] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1060.511247] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the lease: (returnval){ [ 1060.511247] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a87e5e-21e1-74d2-2bcd-df32210469d1" [ 1060.511247] env[69927]: _type = "HttpNfcLease" [ 1060.511247] env[69927]: } to be ready. 
{{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1060.516931] env[69927]: DEBUG nova.network.neutron [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Successfully updated port: 28547515-b928-4d6f-8849-dd2430f73245 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.526207] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1060.526207] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a87e5e-21e1-74d2-2bcd-df32210469d1" [ 1060.526207] env[69927]: _type = "HttpNfcLease" [ 1060.526207] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1060.660584] env[69927]: DEBUG nova.compute.manager [req-e0f76eb2-1aba-4b55-87da-ff632a084209 req-589c79c0-7ae3-4a76-bcec-8648e9f2bd8d service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Received event network-vif-deleted-ace9865c-a7d5-41ad-abee-d70d3aa00dc0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.660785] env[69927]: INFO nova.compute.manager [req-e0f76eb2-1aba-4b55-87da-ff632a084209 req-589c79c0-7ae3-4a76-bcec-8648e9f2bd8d service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Neutron deleted interface ace9865c-a7d5-41ad-abee-d70d3aa00dc0; detaching it from the instance and deleting it from the info cache [ 1060.661036] env[69927]: DEBUG nova.network.neutron [req-e0f76eb2-1aba-4b55-87da-ff632a084209 req-589c79c0-7ae3-4a76-bcec-8648e9f2bd8d service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.710892] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.711146] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.944239] env[69927]: DEBUG nova.network.neutron [-] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.980164] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d7edb5cb-63aa-40ab-926e-b3e1df061d8e tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 53.606s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.981281] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 28.997s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.981537] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.981813] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.982015] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.984713] env[69927]: INFO nova.compute.manager [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Terminating instance [ 1061.021325] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "refresh_cache-c6f166c7-538f-4c8a-9500-48319c694ea0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.021325] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquired lock "refresh_cache-c6f166c7-538f-4c8a-9500-48319c694ea0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.021325] env[69927]: DEBUG nova.network.neutron [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.027776] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1061.027776] env[69927]: value = 
"session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a87e5e-21e1-74d2-2bcd-df32210469d1" [ 1061.027776] env[69927]: _type = "HttpNfcLease" [ 1061.027776] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1061.168069] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01684162-fc88-43ae-ba66-235c3de1cc44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.182030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949917a7-a185-4905-908c-352655e06111 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.221249] env[69927]: DEBUG nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1061.224592] env[69927]: DEBUG nova.compute.manager [req-e0f76eb2-1aba-4b55-87da-ff632a084209 req-589c79c0-7ae3-4a76-bcec-8648e9f2bd8d service nova] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Detach interface failed, port_id=ace9865c-a7d5-41ad-abee-d70d3aa00dc0, reason: Instance 0f5643d4-52f3-4cba-b71b-9c4370175e35 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1061.450197] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.450521] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.451861] env[69927]: INFO nova.compute.manager [-] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Took 1.48 seconds to deallocate network for instance. [ 1061.458435] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfe9708-fa74-4eda-9fe0-567fd09d500a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.467485] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1433a915-4384-4dac-8518-ab347cdb4620 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.503195] env[69927]: DEBUG nova.compute.manager [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1061.503464] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1061.504261] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b397d34c-2fbf-402a-a181-cca0e43f1f32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.506779] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8459ad1-3f43-4911-a26b-9354d798cbeb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.519784] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a14774-c885-4e3d-bcf9-befb4e850a65 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.528373] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c056b718-6db3-42e1-99d1-5b1edc8e706d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.543052] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1061.543052] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a87e5e-21e1-74d2-2bcd-df32210469d1" [ 1061.543052] env[69927]: _type = "HttpNfcLease" [ 1061.543052] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1061.551157] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1061.551157] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a87e5e-21e1-74d2-2bcd-df32210469d1" [ 1061.551157] env[69927]: _type = "HttpNfcLease" [ 1061.551157] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1061.551684] env[69927]: DEBUG nova.compute.provider_tree [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.564592] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf33b259-ea94-45ce-8fd5-b0dab4d3a0a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.567470] env[69927]: WARNING nova.virt.vmwareapi.vmops [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c00e485-fd59-4571-abd5-80ca5e3bac1b could not be found. 
[ 1061.567663] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.571022] env[69927]: INFO nova.compute.manager [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1061.571022] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.571022] env[69927]: DEBUG nova.scheduler.client.report [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.572727] env[69927]: DEBUG nova.compute.manager [-] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1061.572817] env[69927]: DEBUG nova.network.neutron [-] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1061.576698] env[69927]: DEBUG nova.network.neutron [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1061.583342] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6e975-d991-4b6d-9f14-f3985ea854bd/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1061.583527] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating HTTP connection to write to file with size = 31670784 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6e975-d991-4b6d-9f14-f3985ea854bd/disk-0.vmdk. 
{{(pid=69927) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1061.653782] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-66bd9432-a445-4a67-9f41-254596298b15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.746555] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.783265] env[69927]: DEBUG nova.network.neutron [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Updating instance_info_cache with network_info: [{"id": "28547515-b928-4d6f-8849-dd2430f73245", "address": "fa:16:3e:96:82:c7", "network": {"id": "ee80b4b4-7068-47ee-960e-2bcd1829edee", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1555514968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8d14934e39f45f3b8e9542d2dbc5c8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28547515-b9", "ovs_interfaceid": "28547515-b928-4d6f-8849-dd2430f73245", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.954337] env[69927]: DEBUG nova.compute.utils [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1061.957888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.286570] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Releasing lock "refresh_cache-c6f166c7-538f-4c8a-9500-48319c694ea0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.286998] env[69927]: DEBUG 
nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Instance network_info: |[{"id": "28547515-b928-4d6f-8849-dd2430f73245", "address": "fa:16:3e:96:82:c7", "network": {"id": "ee80b4b4-7068-47ee-960e-2bcd1829edee", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1555514968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8d14934e39f45f3b8e9542d2dbc5c8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28547515-b9", "ovs_interfaceid": "28547515-b928-4d6f-8849-dd2430f73245", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1062.287588] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:82:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28547515-b928-4d6f-8849-dd2430f73245', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.296581] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Creating folder: Project (f8d14934e39f45f3b8e9542d2dbc5c8a). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1062.300671] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-510a7b7d-9ae8-4739-9d4f-2458cd96d23e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.315449] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Created folder: Project (f8d14934e39f45f3b8e9542d2dbc5c8a) in parent group-v811283. [ 1062.315732] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Creating folder: Instances. Parent ref: group-v811523. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1062.316029] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6419bbeb-4da1-42ac-80eb-8f68ad0a95ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.332751] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Created folder: Instances in parent group-v811523. [ 1062.333144] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1062.336122] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.336495] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1953262d-4948-4588-8649-2232c231f8ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.372491] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.372491] env[69927]: value = "task-4096411" [ 1062.372491] env[69927]: _type = "Task" [ 1062.372491] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.382249] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096411, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.457926] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.581992] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.114s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.588533] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.222s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.588739] env[69927]: DEBUG nova.objects.instance [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lazy-loading 'resources' on Instance uuid 9348e368-cc3c-4bde-91ae-26fd03ad536a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.713550] env[69927]: DEBUG nova.network.neutron [-] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.883783] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096411, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.954035] env[69927]: DEBUG nova.compute.manager [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Received event network-changed-28547515-b928-4d6f-8849-dd2430f73245 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1062.954035] env[69927]: DEBUG nova.compute.manager [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Refreshing instance network info cache due to event network-changed-28547515-b928-4d6f-8849-dd2430f73245. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1062.954035] env[69927]: DEBUG oslo_concurrency.lockutils [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] Acquiring lock "refresh_cache-c6f166c7-538f-4c8a-9500-48319c694ea0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.954035] env[69927]: DEBUG oslo_concurrency.lockutils [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] Acquired lock "refresh_cache-c6f166c7-538f-4c8a-9500-48319c694ea0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.954533] env[69927]: DEBUG nova.network.neutron [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Refreshing network info cache for port 28547515-b928-4d6f-8849-dd2430f73245 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.964017] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Completed reading data from the image iterator. {{(pid=69927) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1062.964362] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6e975-d991-4b6d-9f14-f3985ea854bd/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1062.965800] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec9c6bf-a706-494a-88d7-cddea1316e09 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.975761] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6e975-d991-4b6d-9f14-f3985ea854bd/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1062.975761] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6e975-d991-4b6d-9f14-f3985ea854bd/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1062.976070] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-30218390-a999-4694-8804-29e6e578c00c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.157416] env[69927]: INFO nova.scheduler.client.report [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted allocation for migration 9ad951cc-c116-4a21-8eba-8383479bc85e [ 1063.205802] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b6e975-d991-4b6d-9f14-f3985ea854bd/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1063.206316] env[69927]: INFO nova.virt.vmwareapi.images [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Downloaded image file data cb041012-44ce-40ce-ba24-60376d2f1762 [ 1063.208122] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49157e75-b1b2-49bb-b5dd-89f3f46f86d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.225525] env[69927]: INFO nova.compute.manager [-] [instance: 9c00e485-fd59-4571-abd5-80ca5e3bac1b] Took 1.65 seconds to deallocate network for instance. 
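The ImportVApp / HttpNfcLease sequence above (lease created, polled while "initializing", VMDK URL read from the lease info, data streamed, lease completed) can be driven directly through the same oslo.vmware session. The following is a simplified, hypothetical sketch, assuming `session`, `rp_ref`, `folder_ref` and `import_spec` already exist and that the lease-state value compares equal to the plain string 'ready', as the "is in state: ready" line suggests; the real upload path in Nova goes through oslo.vmware's read/write handles rather than this loop.

```python
import time

from oslo_vmware import vim_util

# Create the import lease on the target resource pool (ResourcePool.ImportVApp).
lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                           spec=import_spec, folder=folder_ref)

# Poll until the lease leaves the 'initializing' state seen in the log.
while True:
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready':
        break
    if state == 'error':
        raise RuntimeError('HttpNfcLease entered the error state')
    time.sleep(1)

# lease.info.deviceUrl carries the per-disk upload URL ("Found VMDK URL ...").
info = session.invoke_api(vim_util, 'get_object_property',
                          session.vim, lease, 'info')

# ... stream the VMDK bytes to that URL with an HTTP PUT, then close out the
# lease, mirroring the HttpNfcLeaseProgress / HttpNfcLeaseComplete calls above.
session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, percent=100)
session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
```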
[ 1063.230251] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a3bdde1-6f45-429d-aeae-8bccf1420022 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.268946] env[69927]: INFO nova.virt.vmwareapi.images [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] The imported VM was unregistered [ 1063.271857] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Caching image {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1063.271857] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating directory with path [datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.272696] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f48ba3b4-928a-4f84-95be-0807330c09a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.294480] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created directory with path [datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.295034] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312/OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312.vmdk to [datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762/cb041012-44ce-40ce-ba24-60376d2f1762.vmdk. {{(pid=69927) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1063.295034] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c690e526-04fc-40e4-af26-e42531cf4e02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.303075] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1063.303075] env[69927]: value = "task-4096413" [ 1063.303075] env[69927]: _type = "Task" [ 1063.303075] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.315897] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096413, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.389541] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096411, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.511612] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e06be0-d2ca-4b0d-a9b1-df0ba6082e8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.520177] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23eb50af-2f7e-4997-87ff-6fedee93b6e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.553413] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.553681] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.553920] env[69927]: INFO nova.compute.manager [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Attaching volume 5e2d9a37-f003-4747-83b8-b7da94e44cee to /dev/sdb [ 1063.557449] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac03fa9-3a39-4374-a9b6-6c2d81f72373 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.568237] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d283548-461e-4c61-8194-687b36f729a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.585957] env[69927]: DEBUG nova.compute.provider_tree [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.606435] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cfa1e7-b5d7-4869-a10b-35a89c3d8af7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.616878] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af44aad-6c9a-4847-88c6-93c8dea088de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.634657] env[69927]: 
DEBUG nova.virt.block_device [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updating existing volume attachment record: f8313c59-6d3e-42a4-9fcf-57eb80e0da2f {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1063.670103] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50a623fd-926e-48f9-8e25-3467fb717f1e tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 35.759s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.815684] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096413, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.888203] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096411, 'name': CreateVM_Task, 'duration_secs': 1.429234} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.888203] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1063.888452] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.888718] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.889080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1063.889385] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9623c064-c5f9-4fc1-b807-8959e4b30ce2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.896085] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1063.896085] env[69927]: value = 
"session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248c2df-4350-5046-8834-5ebb3d2f40e7" [ 1063.896085] env[69927]: _type = "Task" [ 1063.896085] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.908179] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248c2df-4350-5046-8834-5ebb3d2f40e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.909573] env[69927]: DEBUG nova.network.neutron [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Updated VIF entry in instance network info cache for port 28547515-b928-4d6f-8849-dd2430f73245. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.910125] env[69927]: DEBUG nova.network.neutron [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Updating instance_info_cache with network_info: [{"id": "28547515-b928-4d6f-8849-dd2430f73245", "address": "fa:16:3e:96:82:c7", "network": {"id": "ee80b4b4-7068-47ee-960e-2bcd1829edee", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1555514968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8d14934e39f45f3b8e9542d2dbc5c8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28547515-b9", "ovs_interfaceid": "28547515-b928-4d6f-8849-dd2430f73245", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.092238] env[69927]: DEBUG nova.scheduler.client.report [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1064.271817] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2c42dd5f-8725-4acb-b963-b422b0f092c4 tempest-DeleteServersTestJSON-1690074738 
tempest-DeleteServersTestJSON-1690074738-project-member] Lock "9c00e485-fd59-4571-abd5-80ca5e3bac1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.290s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.317985] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096413, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.410928] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5248c2df-4350-5046-8834-5ebb3d2f40e7, 'name': SearchDatastore_Task, 'duration_secs': 0.020883} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.411269] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.411633] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1064.412024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.412158] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.412799] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.414148] env[69927]: DEBUG oslo_concurrency.lockutils [req-a3546123-1521-4d53-a9a2-cb261352628a req-99b9adfc-c229-4b8c-835e-c30da161021d service nova] Releasing lock "refresh_cache-c6f166c7-538f-4c8a-9500-48319c694ea0" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.415050] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2205af7f-ae1f-4e24-902e-dccc73a21eb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.427772] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.428030] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1064.429145] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a33481a-fc6d-4e3b-af34-cbf0c585b0fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.437834] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1064.437834] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52136b79-b29a-e6e0-5b0b-d6728f7d8db7" [ 1064.437834] env[69927]: _type = "Task" [ 1064.437834] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.448247] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52136b79-b29a-e6e0-5b0b-d6728f7d8db7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.597640] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.600758] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.432s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.602402] env[69927]: INFO nova.compute.claims [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1064.607956] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d7342f-e51a-9a5e-2620-ced9a2da660f/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1064.609437] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a40329-b033-4b00-8073-a3a9e0118d32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.617376] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d7342f-e51a-9a5e-2620-ced9a2da660f/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1064.617571] env[69927]: ERROR oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d7342f-e51a-9a5e-2620-ced9a2da660f/disk-0.vmdk due to incomplete transfer. 
[ 1064.618198] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9a2b8494-5680-45b2-a86d-9ccb7c5dba76 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.623736] env[69927]: INFO nova.scheduler.client.report [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Deleted allocations for instance 9348e368-cc3c-4bde-91ae-26fd03ad536a [ 1064.629744] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d7342f-e51a-9a5e-2620-ced9a2da660f/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1064.629744] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Uploaded image f57cf18a-f065-4e7f-a53c-8cc53c2f655f to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1064.629744] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1064.631921] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c3bcf21d-449b-46bb-ae81-718e46286390 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.643934] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1064.643934] env[69927]: value = "task-4096415" [ 1064.643934] env[69927]: _type = "Task" [ 1064.643934] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.653277] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096415, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.821247] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096413, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.951316] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52136b79-b29a-e6e0-5b0b-d6728f7d8db7, 'name': SearchDatastore_Task, 'duration_secs': 0.016044} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.951316] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c53510a5-c41a-4d58-b083-85c021a87925 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.960799] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1064.960799] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520bffb8-619e-9fad-b78d-51a81a4dec78" [ 1064.960799] env[69927]: _type = "Task" [ 1064.960799] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.974748] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520bffb8-619e-9fad-b78d-51a81a4dec78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.064880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "5581f8af-9796-48ad-a2f3-557e90d9662a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.064880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.064880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.064880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.064880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.067577] env[69927]: INFO nova.compute.manager [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Terminating instance [ 1065.141430] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ee6e714-d62e-488d-b19e-3b36f9182b00 tempest-MigrationsAdminTest-685805879 tempest-MigrationsAdminTest-685805879-project-member] Lock "9348e368-cc3c-4bde-91ae-26fd03ad536a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.599s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.158476] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096415, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.319243] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096413, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.477409] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520bffb8-619e-9fad-b78d-51a81a4dec78, 'name': SearchDatastore_Task, 'duration_secs': 0.024661} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.477409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.477409] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c6f166c7-538f-4c8a-9500-48319c694ea0/c6f166c7-538f-4c8a-9500-48319c694ea0.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1065.477409] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d17b0b3b-50a8-4fcd-99e1-2ed45b0b122c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.487368] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1065.487368] env[69927]: value = "task-4096416" [ 1065.487368] env[69927]: _type = "Task" [ 1065.487368] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.500170] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096416, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.573516] env[69927]: DEBUG nova.compute.manager [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1065.573797] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1065.576821] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e209978-3ec9-4f69-b636-171154b6326f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.589360] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1065.590950] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-372c54ba-9440-4663-a02d-90982dd04d52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.600192] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1065.600192] env[69927]: value = "task-4096417" [ 1065.600192] env[69927]: _type = "Task" [ 1065.600192] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.612276] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.661242] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096415, 'name': Destroy_Task, 'duration_secs': 0.726953} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.661242] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Destroyed the VM [ 1065.661242] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1065.661242] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ea33a533-a06e-43bf-b049-62a923ae2905 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.673783] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1065.673783] env[69927]: value = "task-4096418" [ 1065.673783] env[69927]: _type = "Task" [ 1065.673783] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.689283] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096418, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.711690] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.711976] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.826155] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096413, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.001580] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096416, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.060808] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5755fb-e57c-4f9c-bbf9-d3916ade5a0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.070964] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "cba314de-644e-451e-8ecc-2e209d74bbce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.071409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.076984] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d09eeaf-6064-4a00-92b8-82eb33e12c79 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.113874] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a317d9b3-f0fa-42b9-8619-061fce8031eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.123460] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096417, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.127668] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8127447e-d9c2-4ba6-8450-e3e44b0a30f1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.145156] env[69927]: DEBUG nova.compute.provider_tree [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.185113] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096418, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.215147] env[69927]: DEBUG nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1066.319704] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096413, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.662179} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.320378] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312/OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312.vmdk to [datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762/cb041012-44ce-40ce-ba24-60376d2f1762.vmdk. [ 1066.320838] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Cleaning up location [datastore1] OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1066.320838] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_4cf134ce-89e4-4899-9686-481baef46312 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1066.321036] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d454a79-f2ac-4476-9641-85cde8cac120 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.330576] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1066.330576] env[69927]: value = "task-4096420" [ 1066.330576] env[69927]: _type = "Task" [ 1066.330576] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.344319] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.500301] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096416, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624901} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.500547] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] c6f166c7-538f-4c8a-9500-48319c694ea0/c6f166c7-538f-4c8a-9500-48319c694ea0.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1066.500830] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1066.501125] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de2a2330-86fa-4716-b1f5-3d0d415176c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.509290] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1066.509290] env[69927]: value = "task-4096421" [ 1066.509290] env[69927]: _type = "Task" [ 1066.509290] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.518850] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096421, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.576840] env[69927]: DEBUG nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1066.621335] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096417, 'name': PowerOffVM_Task, 'duration_secs': 0.531612} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.621703] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1066.621896] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1066.622210] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4725843a-2d2a-484a-ae74-17906256fdc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.648701] env[69927]: DEBUG nova.scheduler.client.report [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1066.686263] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096418, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.715063] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1066.715364] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1066.715504] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleting the datastore file [datastore2] 5581f8af-9796-48ad-a2f3-557e90d9662a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1066.715884] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19313ad4-2a52-44a3-a6b7-4040f52cec9c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.730825] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1066.730825] env[69927]: value = "task-4096423" [ 1066.730825] env[69927]: _type = "Task" [ 1066.730825] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.742518] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.744053] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.842427] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038402} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.842759] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.843076] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762/cb041012-44ce-40ce-ba24-60376d2f1762.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.843249] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762/cb041012-44ce-40ce-ba24-60376d2f1762.vmdk to [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1066.843536] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a8ab2b6-e509-4c36-b163-22b10dd3dae4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.852354] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1066.852354] env[69927]: value = "task-4096424" [ 1066.852354] env[69927]: _type = "Task" [ 1066.852354] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.862499] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096424, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.020614] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096421, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073235} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.021193] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1067.021986] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3278060-1f5d-4fa2-b887-516b3737a1fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.050077] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] c6f166c7-538f-4c8a-9500-48319c694ea0/c6f166c7-538f-4c8a-9500-48319c694ea0.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1067.050507] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-696f9b38-10ec-4640-af32-d2d2dfbb717e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.072147] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1067.072147] env[69927]: value = "task-4096425" [ 1067.072147] env[69927]: _type = "Task" [ 1067.072147] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.087944] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096425, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.102584] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.156122] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.156965] env[69927]: DEBUG nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1067.159888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.914s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.161781] env[69927]: INFO nova.compute.claims [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1067.193559] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096418, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.245094] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.369195] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096424, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.589911] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096425, 'name': ReconfigVM_Task, 'duration_secs': 0.452446} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.590249] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Reconfigured VM instance instance-00000053 to attach disk [datastore2] c6f166c7-538f-4c8a-9500-48319c694ea0/c6f166c7-538f-4c8a-9500-48319c694ea0.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.590920] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06ec8c98-6724-4637-b172-28e287c24484 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.603147] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1067.603147] env[69927]: value = "task-4096426" [ 1067.603147] env[69927]: _type = "Task" [ 1067.603147] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.617606] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096426, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.667504] env[69927]: DEBUG nova.compute.utils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1067.671625] env[69927]: DEBUG nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1067.671625] env[69927]: DEBUG nova.network.neutron [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1067.694451] env[69927]: DEBUG oslo_vmware.api [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096418, 'name': RemoveSnapshot_Task, 'duration_secs': 1.645053} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.694757] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1067.695027] env[69927]: INFO nova.compute.manager [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Took 17.74 seconds to snapshot the instance on the hypervisor. [ 1067.739833] env[69927]: DEBUG nova.policy [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e7924fc0a5d4897bd692bfc45863c98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe2e8435ef8a4e5c9ba3751736761cdc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1067.749315] env[69927]: DEBUG oslo_vmware.api [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.525833} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.749554] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1067.749770] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1067.750055] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1067.750168] env[69927]: INFO nova.compute.manager [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Took 2.18 seconds to destroy the instance on the hypervisor. 
[ 1067.750447] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.750655] env[69927]: DEBUG nova.compute.manager [-] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1067.750991] env[69927]: DEBUG nova.network.neutron [-] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1067.868519] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096424, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.119036] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096426, 'name': Rename_Task, 'duration_secs': 0.262059} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.119419] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1068.119763] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78d43751-991a-4246-8e20-e2b6f60eb810 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.135025] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1068.135025] env[69927]: value = "task-4096427" [ 1068.135025] env[69927]: _type = "Task" [ 1068.135025] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.151986] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096427, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.173545] env[69927]: DEBUG nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1068.227193] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1068.227193] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811526', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'name': 'volume-5e2d9a37-f003-4747-83b8-b7da94e44cee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'serial': '5e2d9a37-f003-4747-83b8-b7da94e44cee'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1068.227193] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2a1ce4-86ac-4cf1-9c83-a1942ecb731c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.272174] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fce6a20-86f8-451d-be10-f5d1c18f855a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.310137] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] volume-5e2d9a37-f003-4747-83b8-b7da94e44cee/volume-5e2d9a37-f003-4747-83b8-b7da94e44cee.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.317404] env[69927]: DEBUG nova.compute.manager [None req-b07716c5-f0c9-4056-beb4-1bfd3f5e49e8 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Found 1 images (rotation: 2) {{(pid=69927) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1068.319954] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-513a5acb-3949-446f-94f8-ae4cca33e907 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.340644] env[69927]: DEBUG nova.network.neutron [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Successfully created port: 65e8d7bd-d544-4d00-974f-b16cf425a4dc {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1068.362729] env[69927]: DEBUG oslo_vmware.api [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e 
tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1068.362729] env[69927]: value = "task-4096428" [ 1068.362729] env[69927]: _type = "Task" [ 1068.362729] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.371301] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096424, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.383133] env[69927]: DEBUG oslo_vmware.api [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.458539] env[69927]: DEBUG nova.compute.manager [req-d20ca100-da65-4cd7-943d-16dad22fd43d req-59c96800-53a3-4d4e-8f5b-e8e00306f30d service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Received event network-vif-deleted-fc4d69e0-0a53-4c34-8f56-6416a884b018 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.458717] env[69927]: INFO nova.compute.manager [req-d20ca100-da65-4cd7-943d-16dad22fd43d req-59c96800-53a3-4d4e-8f5b-e8e00306f30d service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Neutron deleted interface fc4d69e0-0a53-4c34-8f56-6416a884b018; detaching it from the instance and deleting it from the info cache [ 1068.458916] env[69927]: DEBUG nova.network.neutron [req-d20ca100-da65-4cd7-943d-16dad22fd43d req-59c96800-53a3-4d4e-8f5b-e8e00306f30d service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.653859] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096427, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.753875] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff335e1-9e29-46aa-abc8-f1ed5ed4bb50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.770963] env[69927]: DEBUG nova.network.neutron [-] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.775368] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41669ebb-e47d-481f-8042-2c165d4a0c9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.826898] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba59df1-52d1-474c-bee1-e53e97a4eff1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.841380] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fdf5a5-d3ae-4ee9-847d-15d6c9b2bdc0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.864288] env[69927]: DEBUG nova.compute.provider_tree [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.880583] env[69927]: DEBUG oslo_vmware.api [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.884461] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096424, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.964189] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6cb4d24-0fb1-4674-86dc-c6616fb0e4f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.981472] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a2c5fb-19a5-43e0-9080-e913a636987e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.026870] env[69927]: DEBUG nova.compute.manager [req-d20ca100-da65-4cd7-943d-16dad22fd43d req-59c96800-53a3-4d4e-8f5b-e8e00306f30d service nova] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Detach interface failed, port_id=fc4d69e0-0a53-4c34-8f56-6416a884b018, reason: Instance 5581f8af-9796-48ad-a2f3-557e90d9662a could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1069.151192] env[69927]: DEBUG oslo_vmware.api [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096427, 'name': PowerOnVM_Task, 'duration_secs': 0.660904} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.151543] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1069.151810] env[69927]: INFO nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Took 9.22 seconds to spawn the instance on the hypervisor. [ 1069.152030] env[69927]: DEBUG nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1069.152903] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80a8942-0cda-42a5-ac34-fbec20ccc257 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.192106] env[69927]: DEBUG nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1069.221797] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1069.222145] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1069.222394] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1069.222623] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1069.222795] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1069.222966] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1069.223209] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1069.223385] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1069.223560] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1069.224291] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1069.224489] env[69927]: DEBUG nova.virt.hardware [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1069.226111] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2119fca6-10b1-4e0f-97bc-d055f2521442 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.244027] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d43a66f-58ee-4c9d-a0cb-1b9f094c93fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.275107] env[69927]: INFO nova.compute.manager [-] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Took 1.52 seconds to deallocate network for instance. [ 1069.283511] env[69927]: DEBUG nova.compute.manager [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1069.284453] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9d99bc-80b4-44a5-957a-9089fe66505f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.374127] env[69927]: DEBUG nova.scheduler.client.report [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.383990] env[69927]: DEBUG oslo_vmware.api [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096428, 'name': ReconfigVM_Task, 'duration_secs': 0.959459} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.384626] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfigured VM instance instance-0000004c to attach disk [datastore2] volume-5e2d9a37-f003-4747-83b8-b7da94e44cee/volume-5e2d9a37-f003-4747-83b8-b7da94e44cee.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.393082] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae146faf-6d2d-4fd9-84eb-ac6a633b0a2a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.403511] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096424, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.416348] env[69927]: DEBUG oslo_vmware.api [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1069.416348] env[69927]: value = "task-4096429" [ 1069.416348] env[69927]: _type = "Task" [ 1069.416348] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.430639] env[69927]: DEBUG oslo_vmware.api [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096429, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.678975] env[69927]: INFO nova.compute.manager [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Took 41.01 seconds to build instance. 
[ 1069.793024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.807018] env[69927]: INFO nova.compute.manager [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] instance snapshotting [ 1069.807018] env[69927]: DEBUG nova.objects.instance [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'flavor' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.878239] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096424, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.805844} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.878239] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cb041012-44ce-40ce-ba24-60376d2f1762/cb041012-44ce-40ce-ba24-60376d2f1762.vmdk to [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1069.879356] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb1a665-de04-4263-8f17-6c4da68d5479 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.887075] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.887075] env[69927]: DEBUG nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1069.904142] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.623s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.904516] env[69927]: DEBUG nova.objects.instance [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lazy-loading 'resources' on Instance uuid b750ce2c-ee85-46c6-bf12-edb3f088e6de {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.916457] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1069.921413] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8aaf3828-d3f3-437a-95d0-e10612dbb868 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.950325] env[69927]: DEBUG oslo_vmware.api [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096429, 'name': ReconfigVM_Task, 'duration_secs': 0.302349} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.952608] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811526', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'name': 'volume-5e2d9a37-f003-4747-83b8-b7da94e44cee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'serial': '5e2d9a37-f003-4747-83b8-b7da94e44cee'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1069.953764] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1069.953764] env[69927]: value = "task-4096430" [ 1069.953764] env[69927]: _type = "Task" [ 1069.953764] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.963506] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096430, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.026340] env[69927]: DEBUG nova.compute.manager [req-25898391-f637-4ea5-baf6-3e71d5b5449c req-e05b3243-71fb-4b3c-b012-e734db5cd7de service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Received event network-vif-plugged-65e8d7bd-d544-4d00-974f-b16cf425a4dc {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.026689] env[69927]: DEBUG oslo_concurrency.lockutils [req-25898391-f637-4ea5-baf6-3e71d5b5449c req-e05b3243-71fb-4b3c-b012-e734db5cd7de service nova] Acquiring lock "dd4c3963-aa58-49f2-b675-9863ff13bddf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.026842] env[69927]: DEBUG oslo_concurrency.lockutils [req-25898391-f637-4ea5-baf6-3e71d5b5449c req-e05b3243-71fb-4b3c-b012-e734db5cd7de service nova] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.027102] env[69927]: DEBUG oslo_concurrency.lockutils [req-25898391-f637-4ea5-baf6-3e71d5b5449c req-e05b3243-71fb-4b3c-b012-e734db5cd7de service nova] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.027327] env[69927]: DEBUG nova.compute.manager [req-25898391-f637-4ea5-baf6-3e71d5b5449c req-e05b3243-71fb-4b3c-b012-e734db5cd7de service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] No waiting events found dispatching network-vif-plugged-65e8d7bd-d544-4d00-974f-b16cf425a4dc {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1070.027513] env[69927]: WARNING nova.compute.manager [req-25898391-f637-4ea5-baf6-3e71d5b5449c req-e05b3243-71fb-4b3c-b012-e734db5cd7de service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Received unexpected event network-vif-plugged-65e8d7bd-d544-4d00-974f-b16cf425a4dc for instance with vm_state building and task_state spawning. 
[ 1070.159670] env[69927]: DEBUG nova.network.neutron [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Successfully updated port: 65e8d7bd-d544-4d00-974f-b16cf425a4dc {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1070.183605] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5b023dce-96c4-4f40-aa75-020d170cc63d tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.520s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.314345] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5576e4f6-88aa-44d6-a07f-33e1ac3faa07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.349235] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740edd38-aff1-4101-a5fd-bf88395ae508 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.387808] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dfc15a-e7f5-4e5a-bb24-76d3f868b97f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.396827] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0c3b58-69f3-4a2a-8d62-af1d8f525b70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.433612] env[69927]: DEBUG nova.compute.utils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1070.436195] env[69927]: DEBUG nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1070.436499] env[69927]: DEBUG nova.network.neutron [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1070.439331] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be40cdd-d8c1-40d6-aa85-636a779138d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.242326] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "refresh_cache-dd4c3963-aa58-49f2-b675-9863ff13bddf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.242669] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquired lock "refresh_cache-dd4c3963-aa58-49f2-b675-9863ff13bddf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.242669] env[69927]: DEBUG nova.network.neutron [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1071.244361] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1071.244808] env[69927]: DEBUG nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1071.252677] env[69927]: DEBUG nova.policy [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76414b2ae1aa4ab582c2b59fd4218005', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544f2a021144492ba1aea46ce6075e53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1071.255152] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b3fa5c86-d005-49fc-bef9-d9992fba4713 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.257938] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c2472a-2311-494b-a035-472bb2da16e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.280155] env[69927]: DEBUG nova.compute.provider_tree [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.287330] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1071.287330] env[69927]: value = "task-4096431" [ 1071.287330] env[69927]: _type = "Task" [ 1071.287330] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.287640] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096430, 'name': ReconfigVM_Task, 'duration_secs': 1.302258} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.288363] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfigured VM instance instance-00000032 to attach disk [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63/a2b1684f-82af-42fc-925e-db36f31cfe63.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.289964] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'size': 0, 'encrypted': False, 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'guest_format': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'disk_bus': None, 'image_id': 'f524494e-9179-4b3e-a3e2-782f019def24'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '8fd18ad0-83f7-45d0-98c0-f67f71ddcab1', 'delete_on_termination': False, 'device_type': None, 'boot_index': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811505', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'name': 'volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a2b1684f-82af-42fc-925e-db36f31cfe63', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'serial': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0'}, 'mount_device': '/dev/sdb', 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69927) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1071.290568] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Volume attach. 
Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1071.290568] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811505', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'name': 'volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a2b1684f-82af-42fc-925e-db36f31cfe63', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'serial': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1071.296964] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3f6017-3b47-49aa-8c0c-4f41e51b383b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.311029] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096431, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.325834] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47f6083-ac26-4c59-8786-cb80530d7b68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.354282] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0/volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.358016] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47b36633-f195-424e-aed9-d004bff77a66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.376703] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1071.376703] env[69927]: value = "task-4096432" [ 1071.376703] env[69927]: _type = "Task" [ 1071.376703] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.385837] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096432, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.776020] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "c6f166c7-538f-4c8a-9500-48319c694ea0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.776020] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.776020] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "c6f166c7-538f-4c8a-9500-48319c694ea0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.776020] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.776020] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.776020] env[69927]: INFO nova.compute.manager [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Terminating instance [ 1071.787374] env[69927]: DEBUG nova.network.neutron [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1071.795863] env[69927]: DEBUG nova.scheduler.client.report [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.804624] env[69927]: DEBUG nova.objects.instance [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.815216] env[69927]: DEBUG nova.network.neutron [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Successfully created port: 885a1391-dd28-4b0c-ae60-7ae0c571a32a {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1071.831528] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096431, 'name': CreateSnapshot_Task, 'duration_secs': 0.515712} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.831528] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1071.834077] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de79ef2-d2b2-4be8-9f69-511d78c318c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.896999] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096432, 'name': ReconfigVM_Task, 'duration_secs': 0.311765} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.897342] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfigured VM instance instance-00000032 to attach disk [datastore1] volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0/volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.912297] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1a26302-680f-4781-a849-10680f76ea76 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.934734] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1071.934734] env[69927]: value = "task-4096433" [ 1071.934734] env[69927]: _type = "Task" [ 1071.934734] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.945695] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096433, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.120534] env[69927]: DEBUG nova.network.neutron [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Updating instance_info_cache with network_info: [{"id": "65e8d7bd-d544-4d00-974f-b16cf425a4dc", "address": "fa:16:3e:b0:26:5d", "network": {"id": "f63cc251-962f-42fc-bc3c-1c72a35b2ef3", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-166410113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe2e8435ef8a4e5c9ba3751736761cdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e8d7bd-d5", "ovs_interfaceid": "65e8d7bd-d544-4d00-974f-b16cf425a4dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.269488] env[69927]: DEBUG nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 
b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1072.282401] env[69927]: DEBUG nova.compute.manager [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1072.282701] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1072.283808] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8299cf7d-9455-46c1-8f30-dcd48975f674 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.292855] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.294922] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be35e65d-8490-4423-8c00-2094c1156f7c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.304071] env[69927]: DEBUG oslo_vmware.api [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1072.304071] env[69927]: value = "task-4096434" [ 1072.304071] env[69927]: _type = "Task" [ 1072.304071] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.307102] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1072.307102] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.307102] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.307102] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.307102] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.307102] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1072.310077] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1072.310077] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1072.310077] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 
tempest-ServersTestJSON-738065696-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1072.310077] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1072.310077] env[69927]: DEBUG nova.virt.hardware [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1072.310077] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9303394-3b9d-4a3d-bc1e-407e187a5167 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.316157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.413s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.320665] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b6da5f96-e326-4247-bd8f-e5c0582db46e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.767s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.321665] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.918s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.321999] env[69927]: DEBUG nova.objects.instance [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lazy-loading 'resources' on Instance uuid 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.329883] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e9e98f-a15f-4747-9d6e-95e2eb145a18 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.335517] env[69927]: DEBUG oslo_vmware.api [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096434, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.369211] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1072.373046] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0f9951c1-22b1-4750-9c12-d15bf6d6961a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.377380] env[69927]: INFO nova.scheduler.client.report [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted allocations for instance b750ce2c-ee85-46c6-bf12-edb3f088e6de [ 1072.390115] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1072.390115] env[69927]: value = "task-4096435" [ 1072.390115] env[69927]: _type = "Task" [ 1072.390115] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.402761] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096435, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.445701] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096433, 'name': ReconfigVM_Task, 'duration_secs': 0.165496} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.446632] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811505', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'name': 'volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a2b1684f-82af-42fc-925e-db36f31cfe63', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'serial': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1072.447379] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-507580cd-8f90-4f40-96bb-91f10fd986d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.456157] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1072.456157] env[69927]: value = "task-4096436" [ 1072.456157] env[69927]: _type = "Task" [ 1072.456157] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.466285] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096436, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.558288] env[69927]: DEBUG nova.compute.manager [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Received event network-changed-65e8d7bd-d544-4d00-974f-b16cf425a4dc {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1072.558288] env[69927]: DEBUG nova.compute.manager [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Refreshing instance network info cache due to event network-changed-65e8d7bd-d544-4d00-974f-b16cf425a4dc. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1072.558503] env[69927]: DEBUG oslo_concurrency.lockutils [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] Acquiring lock "refresh_cache-dd4c3963-aa58-49f2-b675-9863ff13bddf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.625981] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Releasing lock "refresh_cache-dd4c3963-aa58-49f2-b675-9863ff13bddf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.625981] env[69927]: DEBUG nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Instance network_info: |[{"id": "65e8d7bd-d544-4d00-974f-b16cf425a4dc", "address": "fa:16:3e:b0:26:5d", "network": {"id": "f63cc251-962f-42fc-bc3c-1c72a35b2ef3", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-166410113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe2e8435ef8a4e5c9ba3751736761cdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e8d7bd-d5", "ovs_interfaceid": "65e8d7bd-d544-4d00-974f-b16cf425a4dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1072.625981] env[69927]: DEBUG oslo_concurrency.lockutils [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] Acquired lock "refresh_cache-dd4c3963-aa58-49f2-b675-9863ff13bddf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.625981] env[69927]: DEBUG nova.network.neutron [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Refreshing network info cache for port 65e8d7bd-d544-4d00-974f-b16cf425a4dc {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1072.625981] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:26:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '65e8d7bd-d544-4d00-974f-b16cf425a4dc', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1072.635553] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Creating folder: Project (fe2e8435ef8a4e5c9ba3751736761cdc). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1072.636747] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8d2ddd4-3143-4d39-b695-363e31a98b27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.652996] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Created folder: Project (fe2e8435ef8a4e5c9ba3751736761cdc) in parent group-v811283. [ 1072.652996] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Creating folder: Instances. Parent ref: group-v811529. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1072.652996] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3679bb30-a404-4dff-adb5-bcd7175fdb74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.665786] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Created folder: Instances in parent group-v811529. [ 1072.666488] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1072.668200] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1072.668200] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82f4fe4b-8482-4466-85c2-da8a36c1e677 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.696442] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1072.696442] env[69927]: value = "task-4096439" [ 1072.696442] env[69927]: _type = "Task" [ 1072.696442] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.706457] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096439, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.821092] env[69927]: DEBUG oslo_vmware.api [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096434, 'name': PowerOffVM_Task, 'duration_secs': 0.202506} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.821448] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.821728] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1072.822035] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb121a21-af2a-451b-834f-481a79c3a199 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.889596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-19b39e54-2583-4052-ae05-db6401bbbd58 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "b750ce2c-ee85-46c6-bf12-edb3f088e6de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.331s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.910198] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.910198] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.910198] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Deleting the datastore file [datastore2] c6f166c7-538f-4c8a-9500-48319c694ea0 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.914383] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f169d712-9974-4dad-bb85-bbe4d96767bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.917421] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 
tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096435, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.925582] env[69927]: DEBUG oslo_vmware.api [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for the task: (returnval){ [ 1072.925582] env[69927]: value = "task-4096441" [ 1072.925582] env[69927]: _type = "Task" [ 1072.925582] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.942306] env[69927]: DEBUG oslo_vmware.api [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.971748] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096436, 'name': Rename_Task, 'duration_secs': 0.180304} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.975160] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.976261] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7227b412-dfdd-43f6-aa60-59f94c227f0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.988256] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1072.988256] env[69927]: value = "task-4096442" [ 1072.988256] env[69927]: _type = "Task" [ 1072.988256] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.005501] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.165372] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.165645] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.211638] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096439, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.380277] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8dcf455-60ed-4d22-b655-5e8220b7a70f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.399094] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced18a77-f82a-41b2-b948-306428d4da30 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.408320] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096435, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.439334] env[69927]: DEBUG nova.network.neutron [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Updated VIF entry in instance network info cache for port 65e8d7bd-d544-4d00-974f-b16cf425a4dc. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1073.439751] env[69927]: DEBUG nova.network.neutron [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Updating instance_info_cache with network_info: [{"id": "65e8d7bd-d544-4d00-974f-b16cf425a4dc", "address": "fa:16:3e:b0:26:5d", "network": {"id": "f63cc251-962f-42fc-bc3c-1c72a35b2ef3", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-166410113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe2e8435ef8a4e5c9ba3751736761cdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e8d7bd-d5", "ovs_interfaceid": "65e8d7bd-d544-4d00-974f-b16cf425a4dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.445814] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb64ad9-c2e1-4811-ba75-16ddb1e63db0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.458713] env[69927]: DEBUG oslo_vmware.api [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Task: {'id': task-4096441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267662} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.459972] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4f66b7-7eff-4f33-ada6-6cf0f4af005f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.464531] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.465568] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.465816] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.466011] env[69927]: INFO nova.compute.manager [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1073.466284] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1073.466866] env[69927]: DEBUG nova.compute.manager [-] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1073.466957] env[69927]: DEBUG nova.network.neutron [-] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1073.481551] env[69927]: DEBUG nova.compute.provider_tree [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.498424] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096442, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.665509] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.665509] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.665841] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.665841] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.665996] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.669016] env[69927]: DEBUG nova.compute.utils [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1073.670721] env[69927]: INFO nova.compute.manager [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Terminating instance [ 1073.710122] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096439, 'name': CreateVM_Task, 'duration_secs': 0.639651} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.710302] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1073.711060] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.711272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.711600] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1073.711941] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd8dacaf-93bb-41f2-85dd-7079a54177c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.718509] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1073.718509] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5270b508-dd9f-7a8a-698f-7e0f61e6bb80" [ 1073.718509] env[69927]: _type = "Task" [ 1073.718509] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.729119] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5270b508-dd9f-7a8a-698f-7e0f61e6bb80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.901731] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096435, 'name': CloneVM_Task, 'duration_secs': 1.310907} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.902019] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Created linked-clone VM from snapshot [ 1073.902764] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5de3ea1-87ad-4862-b6dd-52799e4c167b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.912582] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Uploading image 3b121e58-ab8b-4cf1-84df-d0dcd99b40cb {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1073.949179] env[69927]: DEBUG nova.network.neutron [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Successfully updated port: 885a1391-dd28-4b0c-ae60-7ae0c571a32a {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1073.950508] env[69927]: DEBUG oslo_concurrency.lockutils [req-10770dbc-b745-419d-ae2a-77e8dca9ed23 req-d98098cc-3563-480e-a034-9be6d284e233 service nova] Releasing lock "refresh_cache-dd4c3963-aa58-49f2-b675-9863ff13bddf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.959091] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1073.959091] env[69927]: value = "vm-811528" [ 1073.959091] env[69927]: _type = "VirtualMachine" [ 1073.959091] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1073.959396] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9d6f02a8-6424-43cd-94a0-5fe0225aee7a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.968070] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease: (returnval){ [ 1073.968070] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238c472-4e2d-0258-def2-028aba279f0a" [ 1073.968070] env[69927]: _type = "HttpNfcLease" [ 1073.968070] env[69927]: } obtained for exporting VM: (result){ [ 1073.968070] env[69927]: value = "vm-811528" [ 1073.968070] env[69927]: _type = "VirtualMachine" [ 1073.968070] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1073.968488] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the lease: (returnval){ [ 1073.968488] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238c472-4e2d-0258-def2-028aba279f0a" [ 1073.968488] env[69927]: _type = "HttpNfcLease" [ 1073.968488] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1073.976644] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1073.976644] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238c472-4e2d-0258-def2-028aba279f0a" [ 1073.976644] env[69927]: _type = "HttpNfcLease" [ 1073.976644] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1073.985236] env[69927]: DEBUG nova.scheduler.client.report [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.998904] env[69927]: DEBUG oslo_vmware.api [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096442, 'name': PowerOnVM_Task, 'duration_secs': 0.5584} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.000692] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1074.131047] env[69927]: DEBUG nova.compute.manager [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1074.131047] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81102e6-c829-4c13-a59d-3e745ce9754e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.181867] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.183771] env[69927]: DEBUG nova.compute.manager [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1074.183771] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1074.184077] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80ac139-11d5-4813-b695-20c702a34f2b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.193142] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.193420] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76dae252-e796-4103-ad29-12d8474b0219 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.201898] env[69927]: DEBUG oslo_vmware.api [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1074.201898] env[69927]: value = "task-4096447" [ 1074.201898] env[69927]: _type = "Task" [ 1074.201898] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.213693] env[69927]: DEBUG oslo_vmware.api [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096447, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.230765] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5270b508-dd9f-7a8a-698f-7e0f61e6bb80, 'name': SearchDatastore_Task, 'duration_secs': 0.011192} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.231203] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.231393] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.231618] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.233075] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.233075] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.233075] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f72557eb-6407-4795-89c5-6dafa8710bee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.244688] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.244778] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1074.245608] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f74f3204-d5a8-4029-a929-1a233c2a0b5a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.249212] env[69927]: DEBUG nova.network.neutron [-] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.255362] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1074.255362] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0007e-3c7b-8fd0-4b42-bc58d244ac5d" [ 1074.255362] env[69927]: _type = "Task" [ 1074.255362] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.266029] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0007e-3c7b-8fd0-4b42-bc58d244ac5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.451708] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "refresh_cache-b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.451930] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "refresh_cache-b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.452122] env[69927]: DEBUG nova.network.neutron [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1074.478148] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1074.478148] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238c472-4e2d-0258-def2-028aba279f0a" [ 1074.478148] env[69927]: _type = "HttpNfcLease" [ 1074.478148] env[69927]: } is ready. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1074.478479] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1074.478479] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5238c472-4e2d-0258-def2-028aba279f0a" [ 1074.478479] env[69927]: _type = "HttpNfcLease" [ 1074.478479] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1074.479227] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb34f129-5bdf-4ce2-a7a0-820f11587e5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.490136] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52deee24-7c5a-527e-58f9-6533179d5abd/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1074.490136] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52deee24-7c5a-527e-58f9-6533179d5abd/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1074.493231] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.549662] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.800s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.551289] env[69927]: INFO nova.compute.claims [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.562412] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "d9347f31-b908-4561-9b57-1ea79b762168" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.562809] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "d9347f31-b908-4561-9b57-1ea79b762168" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.567410] env[69927]: DEBUG nova.compute.manager [req-defcfca8-9bc1-4b34-a108-d39210ccd4a4 req-0ba51313-a3b2-42a1-9fa4-a1ec17780834 service nova] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Received event network-vif-deleted-28547515-b928-4d6f-8849-dd2430f73245 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.575493] env[69927]: INFO nova.scheduler.client.report [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Deleted allocations for instance 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73 [ 1074.639028] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e47a70c2-ee45-4bee-b7b0-5e223ca11c4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.654083] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a71eb5aa-b3e9-4899-b603-04c8ad9ee65d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 55.634s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.713111] env[69927]: DEBUG oslo_vmware.api [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096447, 'name': PowerOffVM_Task, 'duration_secs': 0.380274} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.713111] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.713311] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.713441] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdbb9608-9b2a-47ca-90bd-01be256d9d85 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.752056] env[69927]: INFO nova.compute.manager [-] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Took 1.28 seconds to deallocate network for instance. 
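[editor's note] The entries above and below repeatedly show the oslo.vmware invoke-then-poll pattern: a vSphere task method such as PowerOffVM_Task, CopyVirtualDisk_Task or CreateVM_Task is invoked, and the session then polls it until completion, which is what produces the recurring "Waiting for the task" and "Task: {...} progress is N%" lines (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of that pattern using the public oslo.vmware API follows; it assumes the standard VMwareAPISession constructor parameters, and the vCenter host, credentials and managed object reference value are placeholders, not values taken from this log.

    # Sketch only: invoke an asynchronous vCenter task and poll it to completion.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        "vc.example.test",            # vCenter host (placeholder)
        "user", "password",           # credentials (placeholders)
        api_retry_count=10,           # assumed parameter names per oslo.vmware docs
        task_poll_interval=0.5)

    # Build a managed object reference for an existing VM; the value is illustrative.
    vm_ref = vim_util.get_moref("vm-12345", "VirtualMachine")

    # Start the asynchronous task, then block while oslo.vmware polls it;
    # each poll emits a "Task: {...} progress is N%" DEBUG line like the ones above.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)

[end editor's note]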
[ 1074.766664] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f0007e-3c7b-8fd0-4b42-bc58d244ac5d, 'name': SearchDatastore_Task, 'duration_secs': 0.014202} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.767547] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a6f3b3-8515-45c0-8e19-bd7c927cb7bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.773554] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1074.773554] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52401722-380d-6e97-b376-8025e265eb3f" [ 1074.773554] env[69927]: _type = "Task" [ 1074.773554] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.784893] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52401722-380d-6e97-b376-8025e265eb3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.787534] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.787778] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.787990] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleting the datastore file [datastore1] 44e81156-b0c7-4f68-9732-b39f41ebcd4b {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.788326] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a56044b-face-46cc-bac3-bc9cc6fa9489 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.796934] env[69927]: DEBUG oslo_vmware.api [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1074.796934] env[69927]: value = "task-4096449" [ 1074.796934] env[69927]: _type = "Task" [ 1074.796934] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.808039] env[69927]: DEBUG oslo_vmware.api [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096449, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.895613] env[69927]: DEBUG nova.compute.manager [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Received event network-vif-plugged-885a1391-dd28-4b0c-ae60-7ae0c571a32a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.896062] env[69927]: DEBUG oslo_concurrency.lockutils [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] Acquiring lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.896535] env[69927]: DEBUG oslo_concurrency.lockutils [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.896742] env[69927]: DEBUG oslo_concurrency.lockutils [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.896928] env[69927]: DEBUG nova.compute.manager [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] No waiting events found dispatching network-vif-plugged-885a1391-dd28-4b0c-ae60-7ae0c571a32a {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1074.897118] env[69927]: WARNING nova.compute.manager [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Received unexpected event network-vif-plugged-885a1391-dd28-4b0c-ae60-7ae0c571a32a for instance with vm_state building and task_state spawning. [ 1074.897295] env[69927]: DEBUG nova.compute.manager [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Received event network-changed-885a1391-dd28-4b0c-ae60-7ae0c571a32a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.897499] env[69927]: DEBUG nova.compute.manager [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Refreshing instance network info cache due to event network-changed-885a1391-dd28-4b0c-ae60-7ae0c571a32a. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1074.897682] env[69927]: DEBUG oslo_concurrency.lockutils [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] Acquiring lock "refresh_cache-b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.997013] env[69927]: DEBUG nova.network.neutron [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1075.068093] env[69927]: DEBUG nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1075.087923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-74309ba1-8d16-4896-9a1c-c4810973eeed tempest-ServersTestFqdnHostnames-61407453 tempest-ServersTestFqdnHostnames-61407453-project-member] Lock "7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.610s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.193682] env[69927]: DEBUG nova.network.neutron [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Updating instance_info_cache with network_info: [{"id": "885a1391-dd28-4b0c-ae60-7ae0c571a32a", "address": "fa:16:3e:ff:97:92", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885a1391-dd", "ovs_interfaceid": "885a1391-dd28-4b0c-ae60-7ae0c571a32a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.244975] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.246080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.247203] env[69927]: INFO nova.compute.manager [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Attaching volume f46710fb-384b-4d23-a1bb-cfb413e80958 to /dev/sdc [ 1075.262511] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.286646] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52401722-380d-6e97-b376-8025e265eb3f, 'name': SearchDatastore_Task, 'duration_secs': 0.021421} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.288077] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.288479] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] dd4c3963-aa58-49f2-b675-9863ff13bddf/dd4c3963-aa58-49f2-b675-9863ff13bddf.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1075.289239] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abe31b7-830c-4bc7-9bf2-675a3f1a12da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.293257] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7116c02a-c886-41c9-a21c-801a29d7c6df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.306266] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4f49c2-02ae-4f15-a8d6-1529529540a4 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.309511] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1075.309511] env[69927]: value = "task-4096450" [ 1075.309511] env[69927]: _type = "Task" [ 1075.309511] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.314033] env[69927]: DEBUG oslo_vmware.api [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303949} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.318922] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.318922] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.319169] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.319482] env[69927]: INFO nova.compute.manager [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1075.319816] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.320112] env[69927]: DEBUG nova.compute.manager [-] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1075.320372] env[69927]: DEBUG nova.network.neutron [-] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1075.329727] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096450, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.334978] env[69927]: DEBUG nova.virt.block_device [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updating existing volume attachment record: 56101266-1cf7-4b9f-a993-b36ffb2692f9 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1075.598622] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.698698] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "refresh_cache-b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.698698] env[69927]: DEBUG nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Instance network_info: |[{"id": "885a1391-dd28-4b0c-ae60-7ae0c571a32a", "address": "fa:16:3e:ff:97:92", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885a1391-dd", "ovs_interfaceid": "885a1391-dd28-4b0c-ae60-7ae0c571a32a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1075.703253] env[69927]: DEBUG oslo_concurrency.lockutils [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] Acquired lock "refresh_cache-b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.703253] env[69927]: DEBUG nova.network.neutron [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Refreshing network info cache for port 885a1391-dd28-4b0c-ae60-7ae0c571a32a {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1075.707156] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None 
req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:97:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '885a1391-dd28-4b0c-ae60-7ae0c571a32a', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1075.717622] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating folder: Project (544f2a021144492ba1aea46ce6075e53). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1075.718695] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6af6acf4-67ab-4fb8-bd3d-70e20052c77d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.739367] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created folder: Project (544f2a021144492ba1aea46ce6075e53) in parent group-v811283. [ 1075.739520] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating folder: Instances. Parent ref: group-v811536. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1075.739851] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28980fb5-f68e-472f-ba47-56e82d5764c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.756322] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created folder: Instances in parent group-v811536. [ 1075.756322] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.756322] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1075.756322] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fd3eb7e-ad4f-4b2c-b35a-77d5615e9e5b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.787732] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1075.787732] env[69927]: value = "task-4096454" [ 1075.787732] env[69927]: _type = "Task" [ 1075.787732] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.799950] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096454, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.831993] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096450, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.101675] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6737ca06-a54f-4128-b5f7-4f280fd13cf2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.111694] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a039bc64-324f-49d7-899e-cc0dbc415176 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.152520] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae45c1b-e410-4ed5-af5b-0a397e3b2d6f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.164593] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6e142c-e935-4f1d-9eef-e0746b443874 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.180716] env[69927]: DEBUG nova.compute.provider_tree [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.300307] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096454, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.307226] env[69927]: DEBUG nova.network.neutron [-] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.329837] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096450, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.789225} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.331584] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] dd4c3963-aa58-49f2-b675-9863ff13bddf/dd4c3963-aa58-49f2-b675-9863ff13bddf.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1076.331584] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1076.331584] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e55eea34-c260-4266-a535-3b7fc7d07867 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.340422] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1076.340422] env[69927]: value = "task-4096455" [ 1076.340422] env[69927]: _type = "Task" [ 1076.340422] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.369563] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096455, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.576260] env[69927]: DEBUG nova.network.neutron [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Updated VIF entry in instance network info cache for port 885a1391-dd28-4b0c-ae60-7ae0c571a32a. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1076.577114] env[69927]: DEBUG nova.network.neutron [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Updating instance_info_cache with network_info: [{"id": "885a1391-dd28-4b0c-ae60-7ae0c571a32a", "address": "fa:16:3e:ff:97:92", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885a1391-dd", "ovs_interfaceid": "885a1391-dd28-4b0c-ae60-7ae0c571a32a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.684260] env[69927]: DEBUG nova.scheduler.client.report [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1076.800221] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096454, 'name': CreateVM_Task, 'duration_secs': 0.594532} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.800221] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1076.800221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.800221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.800502] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1076.800564] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-817ac906-dd0f-487a-b902-2d3a5ad75c64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.806736] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1076.806736] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528bf4e5-d04d-81ad-b1b9-4b583dfb6506" [ 1076.806736] env[69927]: _type = "Task" [ 1076.806736] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.812305] env[69927]: INFO nova.compute.manager [-] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Took 1.49 seconds to deallocate network for instance. [ 1076.817643] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528bf4e5-d04d-81ad-b1b9-4b583dfb6506, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.853028] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096455, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104185} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.853564] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.854484] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55db9dd0-580d-4018-beb9-2e940d08c5cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.880802] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] dd4c3963-aa58-49f2-b675-9863ff13bddf/dd4c3963-aa58-49f2-b675-9863ff13bddf.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.881900] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8155f92a-0bb5-4a21-a411-e769a4ff8de7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.903183] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1076.903183] env[69927]: value = "task-4096457" [ 1076.903183] env[69927]: _type = "Task" [ 1076.903183] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.913794] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096457, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.936206] env[69927]: DEBUG nova.compute.manager [req-221e2cae-ef0f-4557-a8ec-d7b4a620d186 req-68c1c72f-8455-458d-96b6-b4b338ccdf4f service nova] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Received event network-vif-deleted-0780f226-29c3-4879-8d9c-5dfd33960929 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1077.082324] env[69927]: DEBUG oslo_concurrency.lockutils [req-2ce0af5d-770c-4bdd-9168-2706c01bb35d req-0efb664d-9705-4605-854b-fb4c7c911089 service nova] Releasing lock "refresh_cache-b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.190584] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.191143] env[69927]: DEBUG nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1077.193959] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.641s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.194271] env[69927]: DEBUG nova.objects.instance [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lazy-loading 'resources' on Instance uuid c3a531fd-647c-43b6-9d3d-fc6ecbc2445e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.319269] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528bf4e5-d04d-81ad-b1b9-4b583dfb6506, 'name': SearchDatastore_Task, 'duration_secs': 0.022043} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.319597] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.319885] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1077.320338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.320338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.320495] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1077.320833] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13ba9715-0cf4-477e-9d12-16d94e978888 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.323773] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.332914] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1077.333129] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1077.333965] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12971329-f824-45bc-a4f2-1230c2e72a1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.343815] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1077.343815] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524bd95b-901b-a992-c9eb-b669e8b213c3" [ 1077.343815] env[69927]: _type = "Task" [ 1077.343815] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.354311] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524bd95b-901b-a992-c9eb-b669e8b213c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.418305] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096457, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.697988] env[69927]: DEBUG nova.compute.utils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1077.702670] env[69927]: DEBUG nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1077.703766] env[69927]: DEBUG nova.network.neutron [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1077.749023] env[69927]: DEBUG nova.policy [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51ec047c72c8450abe8f553c52a847f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef85ff9fc3d240a8a24b6cea8dda0f6f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1077.855767] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524bd95b-901b-a992-c9eb-b669e8b213c3, 'name': SearchDatastore_Task, 'duration_secs': 0.046483} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.863464] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37511189-e7a6-4b05-8348-cdab39de80e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.870086] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1077.870086] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5208d2cb-0630-8edb-eee7-5ed280be4505" [ 1077.870086] env[69927]: _type = "Task" [ 1077.870086] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.884864] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5208d2cb-0630-8edb-eee7-5ed280be4505, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.918425] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096457, 'name': ReconfigVM_Task, 'duration_secs': 0.78673} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.918576] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Reconfigured VM instance instance-00000054 to attach disk [datastore2] dd4c3963-aa58-49f2-b675-9863ff13bddf/dd4c3963-aa58-49f2-b675-9863ff13bddf.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.919175] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5820ea79-c5dc-42f8-9bce-e3b53f8f0aad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.931518] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1077.931518] env[69927]: value = "task-4096459" [ 1077.931518] env[69927]: _type = "Task" [ 1077.931518] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.946506] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096459, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.088291] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75dfacf-cf0b-4c4a-828c-049f8438bfec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.100655] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3564c8ae-ac22-431d-855a-444c1d5ca2bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.135116] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb9d4b0-2f0e-4072-b58d-27b6a5acb965 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.143967] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e462063e-75df-459b-8834-cf5b1de4092a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.159436] env[69927]: DEBUG nova.compute.provider_tree [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.203684] env[69927]: DEBUG nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1078.217477] env[69927]: DEBUG nova.network.neutron [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Successfully created port: c7cb00d8-8939-41db-b3dd-8dd937a6daf1 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1078.388859] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5208d2cb-0630-8edb-eee7-5ed280be4505, 'name': SearchDatastore_Task, 'duration_secs': 0.015766} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.389322] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.389743] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a/b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1078.390150] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-000e0376-9026-4bf6-bc9d-040792e48f88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.406966] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1078.406966] env[69927]: value = "task-4096460" [ 1078.406966] env[69927]: _type = "Task" [ 1078.406966] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.430838] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.458689] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096459, 'name': Rename_Task, 'duration_secs': 0.212479} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.459069] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1078.459438] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc29bc97-9a86-4163-9a4a-93f6a144d246 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.475444] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1078.475444] env[69927]: value = "task-4096461" [ 1078.475444] env[69927]: _type = "Task" [ 1078.475444] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.493548] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096461, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.667062] env[69927]: DEBUG nova.scheduler.client.report [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.914124] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096460, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.986300] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096461, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.172369] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.176317] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.451s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.176317] env[69927]: DEBUG nova.objects.instance [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'resources' on Instance uuid 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.203112] env[69927]: INFO nova.scheduler.client.report [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Deleted allocations for instance c3a531fd-647c-43b6-9d3d-fc6ecbc2445e [ 1079.214132] env[69927]: DEBUG nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1079.252282] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1079.253598] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.253854] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1079.254542] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.254780] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1079.255121] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1079.255526] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1079.255782] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1079.256069] env[69927]: DEBUG 
nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1079.256372] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1079.256643] env[69927]: DEBUG nova.virt.hardware [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1079.259227] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7822bda6-2bc4-4956-bcf3-d3018d2db829 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.278441] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65c9b19-738a-4d8e-9d94-8a552a661296 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.417584] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644803} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.417700] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a/b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1079.417908] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1079.418191] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46472543-afcb-43d0-a8f8-8df6ceeb184d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.427018] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1079.427018] env[69927]: value = "task-4096463" [ 1079.427018] env[69927]: _type = "Task" [ 1079.427018] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.439158] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096463, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.488518] env[69927]: DEBUG oslo_vmware.api [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096461, 'name': PowerOnVM_Task, 'duration_secs': 0.878242} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.488855] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1079.489405] env[69927]: INFO nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Took 10.30 seconds to spawn the instance on the hypervisor. [ 1079.489405] env[69927]: DEBUG nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1079.490406] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaba5dc6-c634-414a-bee5-858f26a53336 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.714597] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3fe24761-cf2c-41c5-b04a-afcc2760cd23 tempest-ServersAdminNegativeTestJSON-439752242 tempest-ServersAdminNegativeTestJSON-439752242-project-member] Lock "c3a531fd-647c-43b6-9d3d-fc6ecbc2445e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.748s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.835919] env[69927]: DEBUG nova.compute.manager [req-12f88a9e-54cf-4c70-b535-3c6a2c62ad8e req-86ea89d9-b346-4491-98e9-b25c21804369 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Received event network-vif-plugged-c7cb00d8-8939-41db-b3dd-8dd937a6daf1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.835919] env[69927]: DEBUG oslo_concurrency.lockutils [req-12f88a9e-54cf-4c70-b535-3c6a2c62ad8e req-86ea89d9-b346-4491-98e9-b25c21804369 service nova] Acquiring lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.836156] env[69927]: DEBUG oslo_concurrency.lockutils [req-12f88a9e-54cf-4c70-b535-3c6a2c62ad8e 
req-86ea89d9-b346-4491-98e9-b25c21804369 service nova] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.836156] env[69927]: DEBUG oslo_concurrency.lockutils [req-12f88a9e-54cf-4c70-b535-3c6a2c62ad8e req-86ea89d9-b346-4491-98e9-b25c21804369 service nova] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.836431] env[69927]: DEBUG nova.compute.manager [req-12f88a9e-54cf-4c70-b535-3c6a2c62ad8e req-86ea89d9-b346-4491-98e9-b25c21804369 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] No waiting events found dispatching network-vif-plugged-c7cb00d8-8939-41db-b3dd-8dd937a6daf1 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1079.836542] env[69927]: WARNING nova.compute.manager [req-12f88a9e-54cf-4c70-b535-3c6a2c62ad8e req-86ea89d9-b346-4491-98e9-b25c21804369 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Received unexpected event network-vif-plugged-c7cb00d8-8939-41db-b3dd-8dd937a6daf1 for instance with vm_state building and task_state spawning. [ 1079.891789] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1079.892104] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811535', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'name': 'volume-f46710fb-384b-4d23-a1bb-cfb413e80958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'serial': 'f46710fb-384b-4d23-a1bb-cfb413e80958'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1079.893116] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d489f1-80aa-4e65-af64-07cf1759fcce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.915095] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075a259f-9ca4-40be-a9b1-d3b883a2712c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.945888] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfiguring VM instance instance-0000004c to attach disk 
[datastore2] volume-f46710fb-384b-4d23-a1bb-cfb413e80958/volume-f46710fb-384b-4d23-a1bb-cfb413e80958.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.946990] env[69927]: DEBUG nova.network.neutron [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Successfully updated port: c7cb00d8-8939-41db-b3dd-8dd937a6daf1 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.953974] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eff8e680-ee6e-4bec-a7c9-6b0eacf536bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.970012] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "refresh_cache-0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.970230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "refresh_cache-0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.970402] env[69927]: DEBUG nova.network.neutron [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.977944] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096463, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081781} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.980282] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1079.980830] env[69927]: DEBUG oslo_vmware.api [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1079.980830] env[69927]: value = "task-4096464" [ 1079.980830] env[69927]: _type = "Task" [ 1079.980830] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.983728] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c98eb9-d9cc-4240-b6ef-cc1992407e2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.996359] env[69927]: DEBUG oslo_vmware.api [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096464, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.017417] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a/b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.021846] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1bdd754-8247-4ee9-a4cd-74fab726ec71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.040866] env[69927]: INFO nova.compute.manager [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Took 36.89 seconds to build instance. [ 1080.048041] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1080.048041] env[69927]: value = "task-4096465" [ 1080.048041] env[69927]: _type = "Task" [ 1080.048041] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.056669] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096465, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.122495] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bd3a08-370f-44d8-b8f2-b598f744387e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.134942] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbf7d81-30a5-47bd-a8b8-869b8772fde1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.166011] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8607c61c-3358-4f72-bd4a-5248692c1936 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.174836] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f631e8bf-9321-47d1-b3ad-9c352b57e13e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.190618] env[69927]: DEBUG nova.compute.provider_tree [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.498370] env[69927]: DEBUG oslo_vmware.api [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096464, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.544013] env[69927]: DEBUG nova.network.neutron [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1080.548177] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3356c948-924a-4d1c-b2c2-99facbb29a47 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.405s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.561340] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096465, 'name': ReconfigVM_Task, 'duration_secs': 0.364788} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.561775] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Reconfigured VM instance instance-00000055 to attach disk [datastore2] b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a/b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.562571] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4a57b14-09cd-4b3c-b719-55eba1aeb11b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.573307] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1080.573307] env[69927]: value = "task-4096466" [ 1080.573307] env[69927]: _type = "Task" [ 1080.573307] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.587186] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096466, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.695845] env[69927]: DEBUG nova.scheduler.client.report [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.851230] env[69927]: DEBUG nova.network.neutron [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Updating instance_info_cache with network_info: [{"id": "c7cb00d8-8939-41db-b3dd-8dd937a6daf1", "address": "fa:16:3e:cf:6b:cc", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", 
"external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cb00d8-89", "ovs_interfaceid": "c7cb00d8-8939-41db-b3dd-8dd937a6daf1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.942823] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "dd4c3963-aa58-49f2-b675-9863ff13bddf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.943146] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.943351] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "dd4c3963-aa58-49f2-b675-9863ff13bddf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.943561] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.943772] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.946089] env[69927]: INFO nova.compute.manager [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Terminating instance [ 1080.998880] env[69927]: DEBUG oslo_vmware.api [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096464, 'name': ReconfigVM_Task, 'duration_secs': 0.649297} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.999202] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfigured VM instance instance-0000004c to attach disk [datastore2] volume-f46710fb-384b-4d23-a1bb-cfb413e80958/volume-f46710fb-384b-4d23-a1bb-cfb413e80958.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.004474] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5801f2e-2809-4e5e-abdb-b588bb6dea97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.021307] env[69927]: DEBUG oslo_vmware.api [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1081.021307] env[69927]: value = "task-4096468" [ 1081.021307] env[69927]: _type = "Task" [ 1081.021307] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.033401] env[69927]: DEBUG oslo_vmware.api [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096468, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.084173] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096466, 'name': Rename_Task, 'duration_secs': 0.182767} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.084498] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1081.084753] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a57482e-d92a-4cc2-8041-4ae099302a5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.092800] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1081.092800] env[69927]: value = "task-4096469" [ 1081.092800] env[69927]: _type = "Task" [ 1081.092800] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.102954] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096469, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.201923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.026s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.204619] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.814s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.204888] env[69927]: DEBUG nova.objects.instance [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lazy-loading 'resources' on Instance uuid a9a62523-50fb-44b2-bfc8-9c6664dbf050 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.232414] env[69927]: INFO nova.scheduler.client.report [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted allocations for instance 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7 [ 1081.354248] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "refresh_cache-0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.354777] env[69927]: DEBUG nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Instance network_info: |[{"id": "c7cb00d8-8939-41db-b3dd-8dd937a6daf1", "address": "fa:16:3e:cf:6b:cc", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cb00d8-89", "ovs_interfaceid": "c7cb00d8-8939-41db-b3dd-8dd937a6daf1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1081.355248] env[69927]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:6b:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7cb00d8-8939-41db-b3dd-8dd937a6daf1', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.363960] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1081.364739] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.365082] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a5a9a16-0b0f-495a-b3d9-6e6af93393f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.386682] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.386682] env[69927]: value = "task-4096470" [ 1081.386682] env[69927]: _type = "Task" [ 1081.386682] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.395538] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096470, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.450339] env[69927]: DEBUG nova.compute.manager [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1081.450585] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.451562] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb4d51a-a02d-4217-9b84-c5e0aa676b52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.460427] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.460767] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb06ec89-2220-4e96-a00d-d7567c4ff0b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.468633] env[69927]: DEBUG oslo_vmware.api [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1081.468633] env[69927]: value = "task-4096471" [ 1081.468633] env[69927]: _type = "Task" [ 1081.468633] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.479735] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.480112] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.480389] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.480646] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.480903] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.483299] env[69927]: DEBUG oslo_vmware.api [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.483807] env[69927]: INFO nova.compute.manager [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Terminating instance [ 1081.534158] env[69927]: DEBUG oslo_vmware.api [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096468, 'name': ReconfigVM_Task, 'duration_secs': 0.184625} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.534509] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811535', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'name': 'volume-f46710fb-384b-4d23-a1bb-cfb413e80958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'serial': 'f46710fb-384b-4d23-a1bb-cfb413e80958'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1081.604494] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096469, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.741371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d0456255-dcbd-4bdc-8093-2f39cbaa7d1c tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "7554b5e2-dcc3-421f-9fe9-a309c9aa03b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.923s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.896427] env[69927]: DEBUG nova.compute.manager [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Received event network-changed-c7cb00d8-8939-41db-b3dd-8dd937a6daf1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.896628] env[69927]: DEBUG nova.compute.manager [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Refreshing instance network info cache due to event network-changed-c7cb00d8-8939-41db-b3dd-8dd937a6daf1. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1081.896932] env[69927]: DEBUG oslo_concurrency.lockutils [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] Acquiring lock "refresh_cache-0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.897305] env[69927]: DEBUG oslo_concurrency.lockutils [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] Acquired lock "refresh_cache-0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.897524] env[69927]: DEBUG nova.network.neutron [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Refreshing network info cache for port c7cb00d8-8939-41db-b3dd-8dd937a6daf1 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.907439] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096470, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.985993] env[69927]: DEBUG oslo_vmware.api [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096471, 'name': PowerOffVM_Task, 'duration_secs': 0.472389} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.985993] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1081.985993] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1081.986464] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e06767de-e146-4e8c-b656-c7e375439ee3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.988736] env[69927]: DEBUG nova.compute.manager [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1081.988736] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.993410] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade1b9f4-8c71-4f42-bd09-715295ff7c6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.003277] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1082.004602] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31e33783-cd4f-41e4-859e-8dce8b4e7ec6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.011488] env[69927]: DEBUG oslo_vmware.api [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1082.011488] env[69927]: value = "task-4096473" [ 1082.011488] env[69927]: _type = "Task" [ 1082.011488] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.021037] env[69927]: DEBUG oslo_vmware.api [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.072700] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.073146] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.075109] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d908d7a3-de75-4865-b9d0-9733a29549ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.080009] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.080288] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.080481] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Deleting the datastore file [datastore2] dd4c3963-aa58-49f2-b675-9863ff13bddf {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.081185] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fce6482-eb50-48b4-b921-e07b42498b01 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.087253] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d804bd65-4381-4af4-82b2-26eb3257dc33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.094443] env[69927]: DEBUG oslo_vmware.api [None req-88ba8a65-bc86-4e44-81be-187b22606691 
tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for the task: (returnval){ [ 1082.094443] env[69927]: value = "task-4096474" [ 1082.094443] env[69927]: _type = "Task" [ 1082.094443] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.132380] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c20a8f-bba0-4cf0-b4b5-afc2fbc0f254 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.141728] env[69927]: DEBUG oslo_vmware.api [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096474, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.141986] env[69927]: DEBUG oslo_vmware.api [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096469, 'name': PowerOnVM_Task, 'duration_secs': 0.520579} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.142655] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1082.142986] env[69927]: INFO nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Took 9.87 seconds to spawn the instance on the hypervisor. [ 1082.143109] env[69927]: DEBUG nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1082.144011] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfae1a07-b0c2-4dbf-8ab6-c2c6730075ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.151441] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee1632b-50ee-4a49-b19f-828403c2842c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.173366] env[69927]: DEBUG nova.compute.provider_tree [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.403056] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096470, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.521568] env[69927]: DEBUG oslo_vmware.api [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096473, 'name': PowerOffVM_Task, 'duration_secs': 0.331396} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.521868] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.522075] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.522961] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0de6a58a-0899-4c3d-987b-4544bf4576fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.581760] env[69927]: DEBUG nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1082.601268] env[69927]: DEBUG nova.objects.instance [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.604132] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.604350] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.604535] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Deleting the datastore file [datastore1] ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.605528] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b390cd0-c757-4466-a026-1b440b52002a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.612232] env[69927]: DEBUG oslo_vmware.api [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Task: {'id': task-4096474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287289} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.612896] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.615023] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1082.615023] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.615023] env[69927]: INFO nova.compute.manager [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1082.615023] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.615023] env[69927]: DEBUG nova.compute.manager [-] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1082.615023] env[69927]: DEBUG nova.network.neutron [-] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1082.617983] env[69927]: DEBUG oslo_vmware.api [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1082.617983] env[69927]: value = "task-4096476" [ 1082.617983] env[69927]: _type = "Task" [ 1082.617983] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.629061] env[69927]: DEBUG oslo_vmware.api [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096476, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.673016] env[69927]: INFO nova.compute.manager [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Took 36.45 seconds to build instance. [ 1082.678181] env[69927]: DEBUG nova.scheduler.client.report [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.713365] env[69927]: DEBUG nova.network.neutron [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Updated VIF entry in instance network info cache for port c7cb00d8-8939-41db-b3dd-8dd937a6daf1. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1082.713745] env[69927]: DEBUG nova.network.neutron [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Updating instance_info_cache with network_info: [{"id": "c7cb00d8-8939-41db-b3dd-8dd937a6daf1", "address": "fa:16:3e:cf:6b:cc", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cb00d8-89", "ovs_interfaceid": "c7cb00d8-8939-41db-b3dd-8dd937a6daf1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.901197] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096470, 'name': CreateVM_Task, 'duration_secs': 1.155155} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.901551] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.902427] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.902754] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.903198] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1082.903583] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4017b15e-61a3-4e84-ba47-592cfd260efc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.913317] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1082.913317] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52723391-13d2-c03e-ea98-e1d6da8f37b5" [ 1082.913317] env[69927]: _type = "Task" [ 1082.913317] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.922737] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52723391-13d2-c03e-ea98-e1d6da8f37b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.935488] env[69927]: DEBUG nova.compute.manager [req-6984ff3d-6b4b-4741-9fcb-4b3329c41bda req-1f86ad9f-430e-4610-9997-f9222f452087 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Received event network-vif-deleted-65e8d7bd-d544-4d00-974f-b16cf425a4dc {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.935706] env[69927]: INFO nova.compute.manager [req-6984ff3d-6b4b-4741-9fcb-4b3329c41bda req-1f86ad9f-430e-4610-9997-f9222f452087 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Neutron deleted interface 65e8d7bd-d544-4d00-974f-b16cf425a4dc; detaching it from the instance and deleting it from the info cache [ 1082.935881] env[69927]: DEBUG nova.network.neutron [req-6984ff3d-6b4b-4741-9fcb-4b3329c41bda req-1f86ad9f-430e-4610-9997-f9222f452087 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.111738] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d858fdb5-0026-4197-9917-9f3e927e8273 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.863s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.115750] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.131708] env[69927]: DEBUG oslo_vmware.api [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181167} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.132462] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.132462] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1083.132462] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1083.132740] env[69927]: INFO nova.compute.manager [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1083.132861] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.133392] env[69927]: DEBUG nova.compute.manager [-] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1083.133392] env[69927]: DEBUG nova.network.neutron [-] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1083.175222] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa796942-b0d9-4a65-b7df-e930c928efbc tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.963s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.198561] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.991s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.203166] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.639s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.203855] env[69927]: DEBUG nova.objects.instance [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lazy-loading 'resources' on Instance uuid 3936a3db-4afa-4a37-9d63-8c18b6b72c72 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.209212] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.209212] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.216294] env[69927]: DEBUG oslo_concurrency.lockutils [req-ddc4798f-5a2b-4222-9e52-cf9d87f378c2 req-2ce9c84e-aeff-4be6-8cce-af8595f498b4 service nova] Releasing lock "refresh_cache-0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.230230] env[69927]: INFO 
nova.scheduler.client.report [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted allocations for instance a9a62523-50fb-44b2-bfc8-9c6664dbf050 [ 1083.380033] env[69927]: DEBUG nova.network.neutron [-] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.423353] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52723391-13d2-c03e-ea98-e1d6da8f37b5, 'name': SearchDatastore_Task, 'duration_secs': 0.013489} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.424206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.424206] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.424206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.424513] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.424513] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.425195] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0addca6d-968b-45f5-9866-4de0c63caaae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.436775] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.436962] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.437819] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03ad62ca-3f54-4097-b2da-725e22f86453 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.441256] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65ef2f58-64a4-4bde-b245-ffebcc4166a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.448022] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1083.448022] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525c1c55-67d4-a6c5-2016-45693853f2a4" [ 1083.448022] env[69927]: _type = "Task" [ 1083.448022] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.456328] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8a36d5-84c8-4de1-8233-b63c08522829 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.483744] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525c1c55-67d4-a6c5-2016-45693853f2a4, 'name': SearchDatastore_Task, 'duration_secs': 0.020625} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.484653] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ea8db69-49d9-4820-b969-47b25dcf1efc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.493240] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1083.493240] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5265fc4f-3c75-e336-4085-e08e66a1d131" [ 1083.493240] env[69927]: _type = "Task" [ 1083.493240] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.512029] env[69927]: DEBUG nova.compute.manager [req-6984ff3d-6b4b-4741-9fcb-4b3329c41bda req-1f86ad9f-430e-4610-9997-f9222f452087 service nova] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Detach interface failed, port_id=65e8d7bd-d544-4d00-974f-b16cf425a4dc, reason: Instance dd4c3963-aa58-49f2-b675-9863ff13bddf could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1083.520156] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5265fc4f-3c75-e336-4085-e08e66a1d131, 'name': SearchDatastore_Task, 'duration_secs': 0.017803} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.521245] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.521245] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5/0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1083.521245] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f6a1524-8937-4803-91c8-73a7b4eeffbd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.530870] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1083.530870] env[69927]: value = "task-4096478" [ 1083.530870] env[69927]: _type = "Task" [ 1083.530870] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.541183] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096478, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.714963] env[69927]: INFO nova.compute.manager [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Detaching volume bcac983c-b893-4914-9a04-f6f06dd0347e [ 1083.740296] env[69927]: DEBUG oslo_concurrency.lockutils [None req-524a524d-df64-487e-b0e1-df35051a6496 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "a9a62523-50fb-44b2-bfc8-9c6664dbf050" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.421s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.764859] env[69927]: INFO nova.virt.block_device [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Attempting to driver detach volume bcac983c-b893-4914-9a04-f6f06dd0347e from mountpoint /dev/sdb [ 1083.764859] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1083.764859] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811465', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'name': 'volume-bcac983c-b893-4914-9a04-f6f06dd0347e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e1b3d0bc-a251-4dbd-89a6-216a2f2c1313', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'serial': 'bcac983c-b893-4914-9a04-f6f06dd0347e'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1083.766291] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9dee0e-48a8-4fc1-8c92-3fdac1579f2d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.793418] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5454a6-ad6e-4ecf-883d-f1149478de0d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.803043] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117d611f-cc6e-4349-9470-c457069ed9bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.834216] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273e9d8a-930d-4dac-81ec-9bd83875c133 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.853430] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] The volume has not been displaced from its original location: [datastore1] volume-bcac983c-b893-4914-9a04-f6f06dd0347e/volume-bcac983c-b893-4914-9a04-f6f06dd0347e.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1083.858975] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Reconfiguring VM instance instance-0000002a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1083.862632] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee4e04a8-a885-4b50-9c93-57d628a5cb1a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.882663] env[69927]: INFO nova.compute.manager [-] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Took 1.27 seconds to deallocate network for instance. [ 1083.882828] env[69927]: DEBUG oslo_vmware.api [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1083.882828] env[69927]: value = "task-4096479" [ 1083.882828] env[69927]: _type = "Task" [ 1083.882828] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.905734] env[69927]: DEBUG oslo_vmware.api [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096479, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.047125] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096478, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.100375] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.100736] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.163859] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fbea1d-fd64-4e06-afde-ce892716f123 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.173769] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d47933-e894-496d-bd62-67cff397a21a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.207570] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5663e0-ea6d-4e27-aa15-f37dd262337b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.216794] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511ea3a6-ed99-404d-8b81-af3c66827e77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.234977] env[69927]: DEBUG nova.compute.provider_tree [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.397789] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.398170] env[69927]: DEBUG oslo_vmware.api [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096479, 'name': ReconfigVM_Task, 'duration_secs': 0.285921} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.398408] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Reconfigured VM instance instance-0000002a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1084.404444] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4adc2be2-5990-4c37-99c9-4b1bc495d6e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.421759] env[69927]: DEBUG oslo_vmware.api [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1084.421759] env[69927]: value = "task-4096480" [ 1084.421759] env[69927]: _type = "Task" [ 1084.421759] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.439370] env[69927]: DEBUG oslo_vmware.api [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096480, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.479500] env[69927]: DEBUG nova.network.neutron [-] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.544927] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096478, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722642} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.545282] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5/0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1084.548465] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1084.548465] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f981e2a0-e1a2-4149-9eee-1f55ac5c0029 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.554758] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1084.554758] env[69927]: value = "task-4096481" [ 1084.554758] env[69927]: _type = "Task" [ 1084.554758] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.568640] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096481, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.605803] env[69927]: INFO nova.compute.manager [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Detaching volume 5e2d9a37-f003-4747-83b8-b7da94e44cee [ 1084.651978] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52deee24-7c5a-527e-58f9-6533179d5abd/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1084.653201] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c0b067-dbc3-49b0-b1c7-3cca49433b6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.659269] env[69927]: INFO nova.virt.block_device [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Attempting to driver detach volume 5e2d9a37-f003-4747-83b8-b7da94e44cee from mountpoint /dev/sdb [ 1084.660474] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1084.661409] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811526', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'name': 'volume-5e2d9a37-f003-4747-83b8-b7da94e44cee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'serial': '5e2d9a37-f003-4747-83b8-b7da94e44cee'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1084.663605] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c202a9aa-d8f5-43ec-a8ce-17c40edfd158 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.692691] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52deee24-7c5a-527e-58f9-6533179d5abd/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1084.692894] env[69927]: ERROR oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52deee24-7c5a-527e-58f9-6533179d5abd/disk-0.vmdk due to incomplete transfer. 
[ 1084.693242] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-80415688-3410-4923-8f38-079e006a8479 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.695418] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d45c7e-6efc-44f0-97ea-e48a10398b4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.703278] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07150fea-b670-4c33-b61d-6488cccaf114 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.707389] env[69927]: DEBUG oslo_vmware.rw_handles [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52deee24-7c5a-527e-58f9-6533179d5abd/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1084.707612] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Uploaded image 3b121e58-ab8b-4cf1-84df-d0dcd99b40cb to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1084.709667] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1084.710449] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e643da4b-0460-46d2-9e68-9ceb6247676b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.737713] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc524e5-c70a-4d0e-9b90-f65a5af79c6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.740592] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1084.740592] env[69927]: value = "task-4096482" [ 1084.740592] env[69927]: _type = "Task" [ 1084.740592] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.741483] env[69927]: DEBUG nova.scheduler.client.report [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.762939] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] The volume has not been displaced from its original location: [datastore2] volume-5e2d9a37-f003-4747-83b8-b7da94e44cee/volume-5e2d9a37-f003-4747-83b8-b7da94e44cee.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1084.769733] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfiguring VM instance instance-0000004c to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1084.770888] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b627d518-5fb2-4188-9781-d48a3271a729 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.788260] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096482, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.797726] env[69927]: DEBUG oslo_vmware.api [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1084.797726] env[69927]: value = "task-4096483" [ 1084.797726] env[69927]: _type = "Task" [ 1084.797726] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.809064] env[69927]: DEBUG oslo_vmware.api [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096483, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.932930] env[69927]: DEBUG oslo_vmware.api [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096480, 'name': ReconfigVM_Task, 'duration_secs': 0.185583} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.933269] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811465', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'name': 'volume-bcac983c-b893-4914-9a04-f6f06dd0347e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e1b3d0bc-a251-4dbd-89a6-216a2f2c1313', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcac983c-b893-4914-9a04-f6f06dd0347e', 'serial': 'bcac983c-b893-4914-9a04-f6f06dd0347e'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1084.971860] env[69927]: DEBUG nova.compute.manager [req-20a41cde-4c9a-4852-b635-480a035c9926 req-62e3e878-3b49-4292-918e-94ea0a9be7b3 service nova] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Received event network-vif-deleted-8c5792b5-80a0-4414-bb3b-ae6e25874202 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1084.981920] env[69927]: INFO nova.compute.manager [-] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Took 1.85 seconds to deallocate network for instance. [ 1085.065083] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096481, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076039} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.065959] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.066237] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da8d7da-e451-4838-a618-74983795d0b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.089313] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5/0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.090011] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2a9ee0b-30d0-4a17-9290-ae4c716f9e60 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.112673] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1085.112673] env[69927]: value = "task-4096485" [ 1085.112673] env[69927]: _type = "Task" [ 1085.112673] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.121252] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096485, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.248746] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.045s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.257562] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.544s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.257868] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.258708] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1085.258708] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.512s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.261481] env[69927]: INFO nova.compute.claims [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1085.266552] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60555313-4950-4839-baf4-4affbde78b77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.277222] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096482, 'name': Destroy_Task, 'duration_secs': 0.390882} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.277949] env[69927]: INFO nova.scheduler.client.report [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Deleted allocations for instance 3936a3db-4afa-4a37-9d63-8c18b6b72c72 [ 1085.282626] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Destroyed the VM [ 1085.282626] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1085.284778] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ba884670-14be-47df-8328-ac741ae36462 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.292057] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3209ccf8-f7e1-4e9f-a91e-399c917ce661 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.298601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "21b7b237-557e-4030-93bb-6b5ce417e53c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.298861] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.299057] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "21b7b237-557e-4030-93bb-6b5ce417e53c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.299507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.299507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 
tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.320268] env[69927]: INFO nova.compute.manager [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Terminating instance [ 1085.322192] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1085.322192] env[69927]: value = "task-4096486" [ 1085.322192] env[69927]: _type = "Task" [ 1085.322192] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.324937] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2f2d2a-5604-49ee-9dc2-359bded2305b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.338527] env[69927]: DEBUG oslo_vmware.api [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096483, 'name': ReconfigVM_Task, 'duration_secs': 0.24883} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.339827] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "b422d5c9-f580-4d07-9d13-af307571bf48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.339997] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b422d5c9-f580-4d07-9d13-af307571bf48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.341475] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfigured VM instance instance-0000004c to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1085.349628] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4617619-c0df-4036-930b-098815613599 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.365560] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655c5620-da81-4d3c-ac8e-30039644e311 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.369524] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096486, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.400213] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178808MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1085.400482] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.403205] env[69927]: DEBUG oslo_vmware.api [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1085.403205] env[69927]: value = "task-4096487" [ 1085.403205] env[69927]: _type = "Task" [ 1085.403205] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.420190] env[69927]: DEBUG oslo_vmware.api [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096487, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.482664] env[69927]: DEBUG nova.objects.instance [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'flavor' on Instance uuid e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.489687] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.623577] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096485, 'name': ReconfigVM_Task, 'duration_secs': 0.301746} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.623954] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5/0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.624643] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad9a1388-024c-45b5-8691-72721f59a4dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.632598] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1085.632598] env[69927]: value = "task-4096488" [ 1085.632598] env[69927]: _type = "Task" [ 1085.632598] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.644289] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096488, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.801951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b8438293-e822-422c-bdd3-b7c6868c299c tempest-ServerAddressesNegativeTestJSON-572648168 tempest-ServerAddressesNegativeTestJSON-572648168-project-member] Lock "3936a3db-4afa-4a37-9d63-8c18b6b72c72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.244s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.829812] env[69927]: DEBUG nova.compute.manager [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1085.830022] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1085.830844] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e5b83e-a169-4c49-9070-8fa09b5e0d42 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.836778] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096486, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.841574] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1085.841791] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07544b4a-2994-4f2f-a74b-8c74411cf4de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.846975] env[69927]: DEBUG nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1085.852268] env[69927]: DEBUG oslo_vmware.api [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1085.852268] env[69927]: value = "task-4096489" [ 1085.852268] env[69927]: _type = "Task" [ 1085.852268] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.861025] env[69927]: DEBUG oslo_vmware.api [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096489, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.915710] env[69927]: DEBUG oslo_vmware.api [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096487, 'name': ReconfigVM_Task, 'duration_secs': 0.173186} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.915710] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811526', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'name': 'volume-5e2d9a37-f003-4747-83b8-b7da94e44cee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': '5e2d9a37-f003-4747-83b8-b7da94e44cee', 'serial': '5e2d9a37-f003-4747-83b8-b7da94e44cee'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1086.143557] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096488, 'name': Rename_Task, 'duration_secs': 0.154728} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.143874] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.144226] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b1ca060-6e47-497c-b7be-3d5416c37c33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.152922] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1086.152922] env[69927]: value = "task-4096490" [ 1086.152922] env[69927]: _type = "Task" [ 1086.152922] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.165242] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096490, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.337538] env[69927]: DEBUG oslo_vmware.api [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096486, 'name': RemoveSnapshot_Task, 'duration_secs': 0.553159} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.337821] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1086.338042] env[69927]: INFO nova.compute.manager [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Took 16.03 seconds to snapshot the instance on the hypervisor. [ 1086.368376] env[69927]: DEBUG oslo_vmware.api [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096489, 'name': PowerOffVM_Task, 'duration_secs': 0.481759} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.368376] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1086.368376] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1086.368587] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ca1fbe5-81e0-458b-ae02-37b5dbe55762 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.372551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.471662] env[69927]: DEBUG nova.objects.instance [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.490955] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e274447f-21e2-40aa-8967-778adf370c02 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.284s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.616828] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b593cc12-019f-4ffe-b5d7-25a74098dacf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.625689] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa631835-a6fa-4912-936e-042deb2b4212 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.668469] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478b478d-6a23-4a25-9396-0aba2214b1d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.670197] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1086.670416] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 
tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1086.670597] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleting the datastore file [datastore2] 21b7b237-557e-4030-93bb-6b5ce417e53c {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.671201] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f0fb232-5b1a-445f-9a74-5d9f57d7f157 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.680215] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096490, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.683197] env[69927]: DEBUG oslo_vmware.api [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1086.683197] env[69927]: value = "task-4096492" [ 1086.683197] env[69927]: _type = "Task" [ 1086.683197] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.684457] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d9ddcc-b6d9-4121-89ff-08951392c512 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.704316] env[69927]: DEBUG oslo_vmware.api [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.704828] env[69927]: DEBUG nova.compute.provider_tree [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.898699] env[69927]: DEBUG nova.compute.manager [None req-3c753d13-f19d-4923-8d71-96875b03175e tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Found 2 images (rotation: 2) {{(pid=69927) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1087.179641] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096490, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.204932] env[69927]: DEBUG oslo_vmware.api [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142691} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.204932] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.204932] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.204932] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.204932] env[69927]: INFO nova.compute.manager [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Took 1.37 seconds to destroy the instance on the hypervisor. [ 1087.204932] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.204932] env[69927]: DEBUG nova.compute.manager [-] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1087.204932] env[69927]: DEBUG nova.network.neutron [-] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1087.210218] env[69927]: DEBUG nova.scheduler.client.report [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.482982] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4f16a82e-0aa6-46f0-b742-43a7e716a2d7 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.382s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.588900] env[69927]: DEBUG nova.compute.manager [req-7cde6e61-be3a-480c-b8b9-ebfb2cc9bfea req-ffd7d0af-3658-427d-8ac3-3931c88d1bc6 service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Received event network-vif-deleted-6337fd25-9a6d-4947-a0dd-1a56aac7beb3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1087.589271] env[69927]: INFO nova.compute.manager [req-7cde6e61-be3a-480c-b8b9-ebfb2cc9bfea req-ffd7d0af-3658-427d-8ac3-3931c88d1bc6 service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Neutron deleted interface 6337fd25-9a6d-4947-a0dd-1a56aac7beb3; detaching it from the instance and deleting it from the info cache [ 1087.589468] env[69927]: DEBUG nova.network.neutron [req-7cde6e61-be3a-480c-b8b9-ebfb2cc9bfea req-ffd7d0af-3658-427d-8ac3-3931c88d1bc6 service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.635297] env[69927]: DEBUG oslo_concurrency.lockutils [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.635297] env[69927]: DEBUG oslo_concurrency.lockutils [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: 
waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.651828] env[69927]: DEBUG nova.compute.manager [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1087.652901] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1fdb4c-6d5b-444e-84bd-c79b7198082e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.673638] env[69927]: DEBUG oslo_vmware.api [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096490, 'name': PowerOnVM_Task, 'duration_secs': 1.11298} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.673945] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.674168] env[69927]: INFO nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1087.674353] env[69927]: DEBUG nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1087.675145] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3d998d-ed51-4ac7-bb9a-8ba235177d12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.714813] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.715385] env[69927]: DEBUG nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1087.718733] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.761s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.719110] env[69927]: DEBUG nova.objects.instance [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lazy-loading 'resources' on Instance uuid 0f5643d4-52f3-4cba-b71b-9c4370175e35 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.016751] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.016751] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.016751] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.016751] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.017709] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.019703] env[69927]: INFO nova.compute.manager [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Terminating instance [ 1088.072248] env[69927]: DEBUG nova.network.neutron [-] 
[instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.092621] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b907ca88-98fd-46f0-84a1-e7767b220f33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.103277] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d42c59-053e-48ba-b927-4ca20019119d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.137022] env[69927]: DEBUG nova.compute.manager [req-7cde6e61-be3a-480c-b8b9-ebfb2cc9bfea req-ffd7d0af-3658-427d-8ac3-3931c88d1bc6 service nova] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Detach interface failed, port_id=6337fd25-9a6d-4947-a0dd-1a56aac7beb3, reason: Instance 21b7b237-557e-4030-93bb-6b5ce417e53c could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1088.139087] env[69927]: INFO nova.compute.manager [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Detaching volume f46710fb-384b-4d23-a1bb-cfb413e80958 [ 1088.168372] env[69927]: INFO nova.compute.manager [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] instance snapshotting [ 1088.169311] env[69927]: DEBUG nova.objects.instance [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'flavor' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.175895] env[69927]: INFO nova.virt.block_device [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Attempting to driver detach volume f46710fb-384b-4d23-a1bb-cfb413e80958 from mountpoint /dev/sdc [ 1088.175895] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1088.175895] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811535', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'name': 'volume-f46710fb-384b-4d23-a1bb-cfb413e80958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'serial': 'f46710fb-384b-4d23-a1bb-cfb413e80958'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1088.177707] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaa38f1-d515-4e41-a634-1737f8ee8a33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.210640] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dc5477-cc10-48d0-805f-9ef2b4faceb6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.215449] env[69927]: INFO nova.compute.manager [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Took 39.48 seconds to build instance. [ 1088.224944] env[69927]: DEBUG nova.compute.utils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1088.227617] env[69927]: DEBUG nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1088.227617] env[69927]: DEBUG nova.network.neutron [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1088.231487] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2939f1ef-9dfe-4128-8803-dc11e03a2019 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.261652] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba42c10-8287-4e18-ac08-92dfbaeb121b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.278990] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] The volume has not been displaced from its original location: [datastore2] volume-f46710fb-384b-4d23-a1bb-cfb413e80958/volume-f46710fb-384b-4d23-a1bb-cfb413e80958.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1088.284286] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfiguring VM instance instance-0000004c to detach disk 2002 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1088.285999] env[69927]: DEBUG nova.policy [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd44e4f75d4334bc39b39d09532822a52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd89e48aa3c104d4da1c57d568178185c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1088.290041] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-212da25c-8b9f-4604-8439-a07ff6fbf475 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.313139] env[69927]: DEBUG oslo_vmware.api [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1088.313139] env[69927]: value = "task-4096494" [ 1088.313139] env[69927]: _type = "Task" [ 1088.313139] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.332069] env[69927]: DEBUG oslo_vmware.api [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096494, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.523583] env[69927]: DEBUG nova.compute.manager [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1088.523986] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.524684] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3218b7-c113-440b-ad44-3887ceb101dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.538288] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1088.538966] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cafff86-ae9d-4afa-ac58-52fd7f00b34a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.551784] env[69927]: DEBUG oslo_vmware.api [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1088.551784] env[69927]: value = "task-4096495" [ 1088.551784] env[69927]: _type = "Task" [ 1088.551784] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.563977] env[69927]: DEBUG oslo_vmware.api [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.578024] env[69927]: INFO nova.compute.manager [-] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Took 1.37 seconds to deallocate network for instance. 
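The records above and below follow one recurring pattern: an oslo.vmware call (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, and so on) returns a vCenter task handle such as task-4096494, and the caller blocks in wait_for_task/_poll_task, logging the reported progress until the task reaches a terminal state, at which point its duration_secs is recorded. A minimal sketch of that poll-until-done loop, for illustration only (this is not Nova's or oslo.vmware's actual code; get_task_info, poll_interval and timeout are hypothetical names):

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        get_task_info is assumed to be a callable returning an object with
        .state ("queued" | "running" | "success" | "error"), .progress and
        .error, mirroring the TaskInfo fields echoed in the surrounding
        log records.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                # terminal state; the log reports duration_secs at this point
                return info
            if info.state == "error":
                raise RuntimeError("task failed: %s" % (info.error,))
            # still queued/running: report progress and poll again, which is
            # what produces the "progress is N%" lines seen in the records
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %.0f seconds" % timeout)

In the surrounding records, the "progress is N%" lines and the final duration_secs entries for task-4096494 and task-4096495 come from this kind of polling inside oslo_vmware.api (wait_for_task/_poll_task), as the cited source paths indicate.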
[ 1088.652534] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b99c191-5ec4-43be-b7af-9f1dc67a5872 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.662823] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8aaf167-7d59-4dd2-b6b5-5a6f3b174d48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.702142] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c1bc88-a913-4277-9c08-b949f98ba95c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.707052] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0a5e6b-6232-4c29-8f2f-8bc815c07727 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.718578] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ea4d760d-b3b0-4acd-9fc0-dfb6e0b65600 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.992s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.738781] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d059ab3-f0f5-4abd-9356-1a30f70781f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.744408] env[69927]: DEBUG nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1088.751025] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e4662d-4101-4006-a4c0-1ba3499b4364 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.768204] env[69927]: DEBUG nova.compute.provider_tree [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.818892] env[69927]: DEBUG nova.network.neutron [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Successfully created port: a1e7adcd-1321-49ab-a876-14b8e90db77c {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.827890] env[69927]: DEBUG oslo_vmware.api [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096494, 'name': ReconfigVM_Task, 'duration_secs': 0.296472} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.828277] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Reconfigured VM instance instance-0000004c to detach disk 2002 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1088.833596] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1af1cae7-e97b-4764-90a5-486d26d15de9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.853479] env[69927]: DEBUG oslo_vmware.api [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1088.853479] env[69927]: value = "task-4096496" [ 1088.853479] env[69927]: _type = "Task" [ 1088.853479] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.866751] env[69927]: DEBUG oslo_vmware.api [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096496, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.066163] env[69927]: DEBUG oslo_vmware.api [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096495, 'name': PowerOffVM_Task, 'duration_secs': 0.300466} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.066848] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.067298] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.067834] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d818b4d7-7979-4fb6-8e10-32ccc14a08fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.087363] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.155616] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.155616] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.155616] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleting the datastore file [datastore1] e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.155616] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f19a3eb-3713-481a-9b10-75312611bc76 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.166704] env[69927]: DEBUG oslo_vmware.api [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1089.166704] env[69927]: value = "task-4096499" [ 1089.166704] env[69927]: _type = "Task" [ 1089.166704] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.186855] env[69927]: DEBUG oslo_vmware.api [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096499, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.275933] env[69927]: DEBUG nova.scheduler.client.report [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.281514] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1089.281514] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a760eb1d-de73-4cd3-a6c8-a5a9c16226b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.291303] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1089.291303] env[69927]: value = "task-4096500" [ 1089.291303] env[69927]: _type = "Task" [ 1089.291303] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.304619] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096500, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.365320] env[69927]: DEBUG oslo_vmware.api [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096496, 'name': ReconfigVM_Task, 'duration_secs': 0.195885} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.365763] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811535', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'name': 'volume-f46710fb-384b-4d23-a1bb-cfb413e80958', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0c8e43a3-3f33-4a41-81d3-a98565dca4a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f46710fb-384b-4d23-a1bb-cfb413e80958', 'serial': 'f46710fb-384b-4d23-a1bb-cfb413e80958'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1089.610828] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.611267] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.611367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.611601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.611818] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.615090] env[69927]: INFO nova.compute.manager [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] 
Terminating instance [ 1089.679150] env[69927]: DEBUG oslo_vmware.api [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170075} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.679150] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1089.679150] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1089.679538] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.679538] env[69927]: INFO nova.compute.manager [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1089.679823] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.680105] env[69927]: DEBUG nova.compute.manager [-] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1089.680239] env[69927]: DEBUG nova.network.neutron [-] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1089.758476] env[69927]: DEBUG nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1089.782138] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.063s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.789674] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.045s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.791922] env[69927]: INFO nova.compute.claims [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1089.807218] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.808045] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.808319] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.808756] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.809018] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1089.809446] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.810376] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.810376] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.810586] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.810929] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.811303] env[69927]: DEBUG nova.virt.hardware [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.812941] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4eb924c-913f-4a5d-bc85-6a77054316e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.825700] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096500, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.829116] env[69927]: INFO nova.scheduler.client.report [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Deleted allocations for instance 0f5643d4-52f3-4cba-b71b-9c4370175e35 [ 1089.832058] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4b7fa5-8a6a-40ff-996b-258cc6bb274c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.923229] env[69927]: DEBUG nova.objects.instance [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lazy-loading 'flavor' on Instance uuid 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.120405] env[69927]: DEBUG nova.compute.manager [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.120720] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.122085] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae70c4c-c54b-4692-b044-06f58964e435 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.132385] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.132662] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e343cfaa-4a24-40e0-9b47-a3e36ad07324 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.142414] env[69927]: DEBUG oslo_vmware.api [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1090.142414] env[69927]: value = "task-4096501" [ 1090.142414] env[69927]: _type = "Task" [ 1090.142414] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.153207] env[69927]: DEBUG oslo_vmware.api [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096501, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.316281] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096500, 'name': CreateSnapshot_Task, 'duration_secs': 0.543384} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.317261] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1090.318814] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1476e06c-5f82-492c-9c56-eb326c712e16 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.347547] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20152ea-0c63-4573-87f0-c6bdb9416a66 tempest-ImagesOneServerNegativeTestJSON-924528592 tempest-ImagesOneServerNegativeTestJSON-924528592-project-member] Lock "0f5643d4-52f3-4cba-b71b-9c4370175e35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.056s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.535385] env[69927]: DEBUG nova.compute.manager [req-20f0cba5-1b1f-4416-93b3-e7707fb4d672 req-3802effc-9eef-43e0-9e43-a5306818a023 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Received event network-vif-plugged-a1e7adcd-1321-49ab-a876-14b8e90db77c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.535640] env[69927]: DEBUG oslo_concurrency.lockutils [req-20f0cba5-1b1f-4416-93b3-e7707fb4d672 req-3802effc-9eef-43e0-9e43-a5306818a023 service nova] Acquiring lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.535863] env[69927]: DEBUG oslo_concurrency.lockutils [req-20f0cba5-1b1f-4416-93b3-e7707fb4d672 req-3802effc-9eef-43e0-9e43-a5306818a023 service nova] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.536063] env[69927]: DEBUG oslo_concurrency.lockutils [req-20f0cba5-1b1f-4416-93b3-e7707fb4d672 req-3802effc-9eef-43e0-9e43-a5306818a023 service nova] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.536271] env[69927]: DEBUG nova.compute.manager [req-20f0cba5-1b1f-4416-93b3-e7707fb4d672 req-3802effc-9eef-43e0-9e43-a5306818a023 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] No waiting events found dispatching network-vif-plugged-a1e7adcd-1321-49ab-a876-14b8e90db77c {{(pid=69927) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:321}} [ 1090.536443] env[69927]: WARNING nova.compute.manager [req-20f0cba5-1b1f-4416-93b3-e7707fb4d672 req-3802effc-9eef-43e0-9e43-a5306818a023 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Received unexpected event network-vif-plugged-a1e7adcd-1321-49ab-a876-14b8e90db77c for instance with vm_state building and task_state spawning. [ 1090.553110] env[69927]: DEBUG nova.network.neutron [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Successfully updated port: a1e7adcd-1321-49ab-a876-14b8e90db77c {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1090.571692] env[69927]: DEBUG nova.compute.manager [req-87278edb-93ce-417c-aacd-e5c3c3fe2275 req-37175d1e-bd7e-4625-85de-5b8ab7cbb855 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Received event network-vif-deleted-b347a042-35ea-41f5-a96e-84e4553f55d2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.571692] env[69927]: INFO nova.compute.manager [req-87278edb-93ce-417c-aacd-e5c3c3fe2275 req-37175d1e-bd7e-4625-85de-5b8ab7cbb855 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Neutron deleted interface b347a042-35ea-41f5-a96e-84e4553f55d2; detaching it from the instance and deleting it from the info cache [ 1090.571941] env[69927]: DEBUG nova.network.neutron [req-87278edb-93ce-417c-aacd-e5c3c3fe2275 req-37175d1e-bd7e-4625-85de-5b8ab7cbb855 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.654238] env[69927]: DEBUG oslo_vmware.api [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096501, 'name': PowerOffVM_Task, 'duration_secs': 0.315967} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.654238] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.654238] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.654711] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0ec3151-bf5f-4158-899f-fd7dde8a6eec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.728095] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.728095] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.728095] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleting the datastore file [datastore2] 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.730573] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f99bffe0-f8da-4f29-bdff-5ecaef93b258 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.740073] env[69927]: DEBUG oslo_vmware.api [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1090.740073] env[69927]: value = "task-4096503" [ 1090.740073] env[69927]: _type = "Task" [ 1090.740073] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.749282] env[69927]: DEBUG oslo_vmware.api [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096503, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.849286] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1090.855962] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b2d84593-6cdd-4fe1-9bc4-684366d2d1bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.866443] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1090.866443] env[69927]: value = "task-4096504" [ 1090.866443] env[69927]: _type = "Task" [ 1090.866443] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.879788] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096504, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.933679] env[69927]: DEBUG oslo_concurrency.lockutils [None req-78a07679-7c06-4d34-ba51-4f4bcb2ceb4e tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.298s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.953359] env[69927]: DEBUG nova.network.neutron [-] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.056388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "refresh_cache-2ae5fcf7-3111-4e80-80b0-f9c1cece1001" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.056388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquired lock "refresh_cache-2ae5fcf7-3111-4e80-80b0-f9c1cece1001" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.056651] env[69927]: DEBUG nova.network.neutron [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1091.077498] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eca7ceb3-d424-4cf3-99e1-e71e7e466281 
{{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.088668] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8879e2-e2b6-40ce-905a-f33a7de7a419 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.128039] env[69927]: DEBUG nova.compute.manager [req-87278edb-93ce-417c-aacd-e5c3c3fe2275 req-37175d1e-bd7e-4625-85de-5b8ab7cbb855 service nova] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Detach interface failed, port_id=b347a042-35ea-41f5-a96e-84e4553f55d2, reason: Instance e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1091.219568] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b220032-0fcc-4b98-8794-b9e97d256489 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.229099] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c2ae3b-50aa-4443-b149-094174307d61 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.267614] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cb63c6-0809-425e-969b-f87778dabf5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.279221] env[69927]: DEBUG oslo_vmware.api [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142332} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.281060] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113ac2c6-2cc2-44b5-a00c-d8ff45b315c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.285794] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.286140] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.286353] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.286538] env[69927]: INFO nova.compute.manager [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1091.286824] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1091.287162] env[69927]: DEBUG nova.compute.manager [-] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.287233] env[69927]: DEBUG nova.network.neutron [-] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1091.307604] env[69927]: DEBUG nova.compute.provider_tree [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.379286] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096504, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.390724] env[69927]: DEBUG oslo_concurrency.lockutils [None req-617446b3-3737-4344-97eb-5e4bd9d55bde tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.390724] env[69927]: DEBUG oslo_concurrency.lockutils [None req-617446b3-3737-4344-97eb-5e4bd9d55bde tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.390724] env[69927]: DEBUG nova.objects.instance [None req-617446b3-3737-4344-97eb-5e4bd9d55bde tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'flavor' on Instance uuid 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.456693] env[69927]: INFO nova.compute.manager [-] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Took 1.78 seconds to deallocate network for instance. [ 1091.648263] env[69927]: DEBUG nova.network.neutron [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1091.810424] env[69927]: DEBUG nova.scheduler.client.report [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.882693] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096504, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.894742] env[69927]: DEBUG nova.objects.instance [None req-617446b3-3737-4344-97eb-5e4bd9d55bde tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'pci_requests' on Instance uuid 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.965851] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.971218] env[69927]: DEBUG nova.network.neutron [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Updating instance_info_cache with network_info: [{"id": "a1e7adcd-1321-49ab-a876-14b8e90db77c", "address": "fa:16:3e:8f:eb:eb", "network": {"id": "9df95590-9bd4-439f-a767-9262ab7a3d5e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2054608473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d89e48aa3c104d4da1c57d568178185c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1e7adcd-13", "ovs_interfaceid": "a1e7adcd-1321-49ab-a876-14b8e90db77c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.098330] env[69927]: DEBUG nova.network.neutron [-] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.316029] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.527s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.316655] env[69927]: DEBUG nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1092.320084] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.217s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.321395] env[69927]: INFO nova.compute.claims [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.391182] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096504, 'name': CloneVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.403256] env[69927]: DEBUG nova.objects.base [None req-617446b3-3737-4344-97eb-5e4bd9d55bde tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Object Instance<20ac32b7-51fc-40bf-a667-2aeb6c8c7648> lazy-loaded attributes: flavor,pci_requests {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1092.403498] env[69927]: DEBUG nova.network.neutron [None req-617446b3-3737-4344-97eb-5e4bd9d55bde tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1092.474154] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Releasing lock "refresh_cache-2ae5fcf7-3111-4e80-80b0-f9c1cece1001" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.474533] env[69927]: DEBUG nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Instance network_info: |[{"id": "a1e7adcd-1321-49ab-a876-14b8e90db77c", "address": "fa:16:3e:8f:eb:eb", "network": {"id": "9df95590-9bd4-439f-a767-9262ab7a3d5e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2054608473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d89e48aa3c104d4da1c57d568178185c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1e7adcd-13", "ovs_interfaceid": 
"a1e7adcd-1321-49ab-a876-14b8e90db77c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1092.475026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:eb:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1e7adcd-1321-49ab-a876-14b8e90db77c', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.484304] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Creating folder: Project (d89e48aa3c104d4da1c57d568178185c). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.488734] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e605f6e-9240-4a94-bbfa-a75ccad2a722 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.503076] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Created folder: Project (d89e48aa3c104d4da1c57d568178185c) in parent group-v811283. [ 1092.503301] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Creating folder: Instances. Parent ref: group-v811543. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.503578] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52b170fa-2891-4e4a-9395-8bc9532da5e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.516729] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Created folder: Instances in parent group-v811543. [ 1092.517029] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.517238] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.517465] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2f6e36d-9932-4d81-a35f-89fe0d6edddf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.542012] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.542012] env[69927]: value = "task-4096507" [ 1092.542012] env[69927]: _type = "Task" [ 1092.542012] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.552413] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096507, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.583928] env[69927]: DEBUG oslo_concurrency.lockutils [None req-617446b3-3737-4344-97eb-5e4bd9d55bde tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.194s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.600077] env[69927]: INFO nova.compute.manager [-] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Took 1.31 seconds to deallocate network for instance. [ 1092.829747] env[69927]: DEBUG nova.compute.utils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1092.831779] env[69927]: DEBUG nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1092.832021] env[69927]: DEBUG nova.network.neutron [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1092.882126] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096504, 'name': CloneVM_Task, 'duration_secs': 1.521691} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.882679] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Created linked-clone VM from snapshot [ 1092.883932] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3211ba-b21d-4220-96fd-94099223e728 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.893865] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Uploading image 87a22900-db4f-4f48-b2cd-ab99a8178619 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1092.930715] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1092.930715] env[69927]: value = "vm-811542" [ 1092.930715] env[69927]: _type = "VirtualMachine" [ 1092.930715] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1092.931298] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-57170a43-53a7-4701-9b2e-8005adee415b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.935840] env[69927]: DEBUG nova.policy [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de8b1b11969a4feb818dc682d2fec552', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61b1aea0ccf049c8942ba32932412497', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1092.943583] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease: (returnval){ [ 1092.943583] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5200392c-f6bb-3884-dc25-5033004e6f0a" [ 1092.943583] env[69927]: _type = "HttpNfcLease" [ 1092.943583] env[69927]: } obtained for exporting VM: (result){ [ 1092.943583] env[69927]: value = "vm-811542" [ 1092.943583] env[69927]: _type = "VirtualMachine" [ 1092.943583] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1092.943872] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the lease: (returnval){ [ 1092.943872] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5200392c-f6bb-3884-dc25-5033004e6f0a" [ 1092.943872] env[69927]: _type = "HttpNfcLease" [ 1092.943872] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1092.951272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.951272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.951272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.951272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.951272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.952535] env[69927]: INFO nova.compute.manager [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Terminating instance [ 1092.956276] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1092.956276] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5200392c-f6bb-3884-dc25-5033004e6f0a" [ 1092.956276] env[69927]: _type = "HttpNfcLease" [ 1092.956276] env[69927]: } is ready. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1092.957859] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1092.957859] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5200392c-f6bb-3884-dc25-5033004e6f0a" [ 1092.957859] env[69927]: _type = "HttpNfcLease" [ 1092.957859] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1092.959150] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aab05e2-98ee-415d-b080-e0d3ad92f0db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.970251] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c9b62-5d02-f1d0-df4d-28e8a82c10d7/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1092.970667] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c9b62-5d02-f1d0-df4d-28e8a82c10d7/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1093.054202] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096507, 'name': CreateVM_Task, 'duration_secs': 0.375338} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.054412] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1093.055240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.055699] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.055809] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1093.056038] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7da433a3-5347-4894-8301-2e849adeb2ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.067017] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-21246462-8c74-4c07-85a0-5d81b1e537b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.070045] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1093.070045] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522218ea-e827-07a6-5d8e-70a04f48aeac" [ 1093.070045] env[69927]: _type = "Task" [ 1093.070045] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.082051] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522218ea-e827-07a6-5d8e-70a04f48aeac, 'name': SearchDatastore_Task, 'duration_secs': 0.01198} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.082669] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.082914] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1093.083191] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.083335] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.083516] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1093.084023] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55e8a3e6-9df1-45d4-905a-ded28a6784d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.094417] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1093.094475] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1093.095721] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c9f4397-901c-4179-bb62-566a21ca20c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.101823] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1093.101823] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f06843-d17a-4ccd-9617-c9dea424b806" [ 1093.101823] env[69927]: _type = "Task" [ 1093.101823] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.109272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.121978] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f06843-d17a-4ccd-9617-c9dea424b806, 'name': SearchDatastore_Task, 'duration_secs': 0.010809} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.122844] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-473f728e-d001-4a56-92dc-efccf657d934 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.129692] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1093.129692] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b98272-2bff-c143-0c59-948acce747fc" [ 1093.129692] env[69927]: _type = "Task" [ 1093.129692] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.139065] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b98272-2bff-c143-0c59-948acce747fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.338773] env[69927]: DEBUG nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1093.356610] env[69927]: DEBUG nova.network.neutron [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Successfully created port: 216a398c-956e-4115-ae6d-b045d946831b {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1093.465430] env[69927]: DEBUG nova.compute.manager [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1093.465430] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1093.467672] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23469092-8230-4cd0-b9d7-09b30b86eb43 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.486053] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1093.487195] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5150c2ba-0ddc-4097-a6f2-3efd245724da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.496413] env[69927]: DEBUG oslo_vmware.api [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1093.496413] env[69927]: value = "task-4096509" [ 1093.496413] env[69927]: _type = "Task" [ 1093.496413] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.507022] env[69927]: DEBUG oslo_vmware.api [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.644241] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b98272-2bff-c143-0c59-948acce747fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010238} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.648089] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.648464] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 2ae5fcf7-3111-4e80-80b0-f9c1cece1001/2ae5fcf7-3111-4e80-80b0-f9c1cece1001.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1093.649104] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f1cc740-70bf-4873-b739-9a6b8d5679d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.657988] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1093.657988] env[69927]: value = "task-4096510" [ 1093.657988] env[69927]: _type = "Task" [ 1093.657988] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.672447] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096510, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.761629] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3959c3ac-4e56-49f7-9f1e-9c8b35aad811 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.774873] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbe67f4-bf44-4720-81ed-1cca01478856 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.814830] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36de0b3d-3f70-4e4b-93e1-621ddcd1a62a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.825256] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22be647-8358-4c23-8c33-047383b02962 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.843866] env[69927]: DEBUG nova.compute.provider_tree [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.934858] env[69927]: DEBUG nova.compute.manager [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Received event network-changed-a1e7adcd-1321-49ab-a876-14b8e90db77c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.938630] env[69927]: DEBUG nova.compute.manager [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Refreshing instance network info cache due to event network-changed-a1e7adcd-1321-49ab-a876-14b8e90db77c. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1093.939039] env[69927]: DEBUG oslo_concurrency.lockutils [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] Acquiring lock "refresh_cache-2ae5fcf7-3111-4e80-80b0-f9c1cece1001" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.939039] env[69927]: DEBUG oslo_concurrency.lockutils [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] Acquired lock "refresh_cache-2ae5fcf7-3111-4e80-80b0-f9c1cece1001" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.939278] env[69927]: DEBUG nova.network.neutron [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Refreshing network info cache for port a1e7adcd-1321-49ab-a876-14b8e90db77c {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1093.991068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquiring lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.991367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.011587] env[69927]: DEBUG oslo_vmware.api [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096509, 'name': PowerOffVM_Task, 'duration_secs': 0.263884} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.012982] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.013350] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1094.013724] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7e54a1b-37f7-42f6-91d6-621ffb6df421 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.129173] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1094.130118] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1094.130857] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Deleting the datastore file [datastore1] 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.133757] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98884279-7a7c-48c0-8128-89220c5e9076 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.143619] env[69927]: DEBUG oslo_vmware.api [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for the task: (returnval){ [ 1094.143619] env[69927]: value = "task-4096512" [ 1094.143619] env[69927]: _type = "Task" [ 1094.143619] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.157827] env[69927]: DEBUG oslo_vmware.api [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.169615] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096510, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.352098] env[69927]: DEBUG nova.scheduler.client.report [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.358587] env[69927]: DEBUG nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1094.392377] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1094.392680] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.392914] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1094.393129] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.393294] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1094.393534] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1094.393783] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1094.393967] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1094.394200] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1094.394633] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1094.394863] env[69927]: DEBUG nova.virt.hardware [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1094.396179] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7001d08e-82d4-4e99-b284-03c79f6429e2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.409684] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0f4b01-5e6b-4376-a278-e6b8e1c50303 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.495022] env[69927]: DEBUG nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1094.657625] env[69927]: DEBUG oslo_vmware.api [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Task: {'id': task-4096512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.465496} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.657961] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.658166] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1094.658348] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1094.658535] env[69927]: INFO nova.compute.manager [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1094.658880] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1094.659132] env[69927]: DEBUG nova.compute.manager [-] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1094.659202] env[69927]: DEBUG nova.network.neutron [-] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1094.671987] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656078} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.672690] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 2ae5fcf7-3111-4e80-80b0-f9c1cece1001/2ae5fcf7-3111-4e80-80b0-f9c1cece1001.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.672952] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1094.673218] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5aec0f86-638d-4a05-95cb-54941b41018d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.681471] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1094.681471] env[69927]: value = "task-4096513" [ 1094.681471] env[69927]: _type = "Task" [ 1094.681471] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.692818] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.723869] env[69927]: DEBUG nova.network.neutron [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Updated VIF entry in instance network info cache for port a1e7adcd-1321-49ab-a876-14b8e90db77c. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1094.724521] env[69927]: DEBUG nova.network.neutron [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Updating instance_info_cache with network_info: [{"id": "a1e7adcd-1321-49ab-a876-14b8e90db77c", "address": "fa:16:3e:8f:eb:eb", "network": {"id": "9df95590-9bd4-439f-a767-9262ab7a3d5e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2054608473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d89e48aa3c104d4da1c57d568178185c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1e7adcd-13", "ovs_interfaceid": "a1e7adcd-1321-49ab-a876-14b8e90db77c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.830023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.830023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.830023] env[69927]: DEBUG nova.objects.instance [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'flavor' on Instance uuid 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.863231] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.863952] env[69927]: DEBUG nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 
cba314de-644e-451e-8ecc-2e209d74bbce] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1094.866604] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.074s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.866906] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.869243] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.608s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.869472] env[69927]: DEBUG nova.objects.instance [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lazy-loading 'resources' on Instance uuid c6f166c7-538f-4c8a-9500-48319c694ea0 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.898086] env[69927]: INFO nova.scheduler.client.report [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted allocations for instance 5581f8af-9796-48ad-a2f3-557e90d9662a [ 1095.024748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.197107] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068988} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.197242] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1095.198824] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e4e3e1-51db-4aac-a0ce-a6e25df5459e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.229801] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 2ae5fcf7-3111-4e80-80b0-f9c1cece1001/2ae5fcf7-3111-4e80-80b0-f9c1cece1001.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.231517] env[69927]: DEBUG nova.network.neutron [-] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.236020] env[69927]: DEBUG oslo_concurrency.lockutils [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] Releasing lock "refresh_cache-2ae5fcf7-3111-4e80-80b0-f9c1cece1001" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.236020] env[69927]: DEBUG nova.compute.manager [req-b268eb8a-061b-4985-b359-58d2412b072b req-5877b050-e091-43a4-b1fb-17fa7224b618 service nova] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Received event network-vif-deleted-c7cb00d8-8939-41db-b3dd-8dd937a6daf1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.236020] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10dd7c43-119f-45cf-9023-c70f9ae0b2fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.254889] env[69927]: DEBUG nova.network.neutron [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Successfully updated port: 216a398c-956e-4115-ae6d-b045d946831b {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1095.260300] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1095.260300] env[69927]: value = "task-4096514" [ 1095.260300] env[69927]: _type = "Task" [ 1095.260300] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.270164] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096514, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.373701] env[69927]: DEBUG nova.compute.utils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1095.379274] env[69927]: DEBUG nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1095.379541] env[69927]: DEBUG nova.network.neutron [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1095.419459] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84e19dcd-9d6b-49da-9308-ecfe474dce16 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "5581f8af-9796-48ad-a2f3-557e90d9662a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.357s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.449630] env[69927]: DEBUG nova.policy [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd89d0a2232b4da1a0b88799062fe8da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3afde63c8cbe4aecb32a470fd6b948f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1095.466348] env[69927]: DEBUG nova.objects.instance [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'pci_requests' on Instance uuid 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.481034] env[69927]: DEBUG nova.compute.manager [req-85b3a85e-1c18-41df-819a-07fed49afd46 req-8d83f617-0867-4c8c-8832-68a947b56aed service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Received event network-vif-plugged-216a398c-956e-4115-ae6d-b045d946831b {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.481307] env[69927]: DEBUG oslo_concurrency.lockutils [req-85b3a85e-1c18-41df-819a-07fed49afd46 
req-8d83f617-0867-4c8c-8832-68a947b56aed service nova] Acquiring lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.481737] env[69927]: DEBUG oslo_concurrency.lockutils [req-85b3a85e-1c18-41df-819a-07fed49afd46 req-8d83f617-0867-4c8c-8832-68a947b56aed service nova] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.481737] env[69927]: DEBUG oslo_concurrency.lockutils [req-85b3a85e-1c18-41df-819a-07fed49afd46 req-8d83f617-0867-4c8c-8832-68a947b56aed service nova] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.481921] env[69927]: DEBUG nova.compute.manager [req-85b3a85e-1c18-41df-819a-07fed49afd46 req-8d83f617-0867-4c8c-8832-68a947b56aed service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] No waiting events found dispatching network-vif-plugged-216a398c-956e-4115-ae6d-b045d946831b {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1095.482220] env[69927]: WARNING nova.compute.manager [req-85b3a85e-1c18-41df-819a-07fed49afd46 req-8d83f617-0867-4c8c-8832-68a947b56aed service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Received unexpected event network-vif-plugged-216a398c-956e-4115-ae6d-b045d946831b for instance with vm_state building and task_state spawning. [ 1095.752712] env[69927]: INFO nova.compute.manager [-] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Took 1.09 seconds to deallocate network for instance.
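The lock lines threaded through this stretch — Lock "compute_resources" acquired :: waited 25.074s and waited 19.608s earlier, and the per-instance "9aa0a285-…-events" lock immediately above — all come from the "inner" wrapper in oslo_concurrency/lockutils.py cited in each entry: "waited" is how long the caller blocked on the named lock, "held" is how long the decorated critical section ran before release. Below is a minimal, self-contained sketch of that accounting, using a plain threading.Lock and an invented timed_lock() helper rather than the real oslo.concurrency decorator; the helper name, print format, and the 0.1 s critical section are illustrative assumptions, not Nova code.

    import contextlib
    import threading
    import time

    _locks = {}
    _registry_guard = threading.Lock()


    def _get_lock(name):
        # One lock object per name, like the process-wide "compute_resources" lock.
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())


    @contextlib.contextmanager
    def timed_lock(name, function):
        # Report how long we waited to acquire and how long we held the lock,
        # mirroring the "waited N.NNNs" / "held N.NNNs" DEBUG lines above.
        lock = _get_lock(name)
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, function, waited))
        acquired_at = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - acquired_at
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, function, held))


    def update_usage():
        # Stand-in for a ResourceTracker method holding the lock briefly.
        with timed_lock("compute_resources",
                        "nova.compute.resource_tracker.ResourceTracker.update_usage"):
            time.sleep(0.1)


    if __name__ == "__main__":
        threads = [threading.Thread(target=update_usage) for _ in range(2)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()

Run with two contending threads, the second report shows a non-zero "waited" while "held" stays near 0.1 s — the same contention pattern behind the long waits on "compute_resources" in the entries above, where terminate, claim, and usage-update paths all serialize on one lock.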
[ 1095.759525] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.759705] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.759857] env[69927]: DEBUG nova.network.neutron [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1095.776205] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5880747d-f753-4c4c-b048-33e4b831c0f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.783136] env[69927]: DEBUG nova.network.neutron [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Successfully created port: 6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1095.792588] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096514, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.793577] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51c5f20-7bd5-4bbf-9c00-3bd90a8fd0ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.829318] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1811e069-4d64-4481-b3c9-a512cd29df34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.839882] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1697f22b-c979-48b1-b819-be7eedd65a7a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.856110] env[69927]: DEBUG nova.compute.provider_tree [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.880518] env[69927]: DEBUG nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1095.971242] env[69927]: DEBUG nova.objects.base [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Object Instance<20ac32b7-51fc-40bf-a667-2aeb6c8c7648> lazy-loaded attributes: flavor,pci_requests {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1095.971439] env[69927]: DEBUG nova.network.neutron [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1096.033009] env[69927]: DEBUG nova.policy [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1096.229606] env[69927]: DEBUG nova.compute.manager [req-4f357935-62ec-4dd1-b70e-92a8593302ce req-aa5cd5d7-665b-4908-9e35-cd6f1fab069f service nova] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Received event network-vif-deleted-0083dc02-3370-427b-bd94-c2267d234d68 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1096.264035] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.278984] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096514, 'name': ReconfigVM_Task, 'duration_secs': 0.790792} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.279389] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 2ae5fcf7-3111-4e80-80b0-f9c1cece1001/2ae5fcf7-3111-4e80-80b0-f9c1cece1001.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.283475] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72494ab6-a40f-4b00-924b-d10d47302a08 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.291520] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1096.291520] env[69927]: value = "task-4096515" [ 1096.291520] env[69927]: _type = "Task" [ 1096.291520] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.303926] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096515, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.323315] env[69927]: DEBUG nova.network.neutron [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1096.359403] env[69927]: DEBUG nova.scheduler.client.report [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.489551] env[69927]: DEBUG nova.network.neutron [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Successfully created port: 5e16c6cd-8c81-4e94-97f0-79a4834e6c6f {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1096.611473] env[69927]: DEBUG nova.network.neutron [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance_info_cache with network_info: [{"id": "216a398c-956e-4115-ae6d-b045d946831b", "address": "fa:16:3e:84:a5:05", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216a398c-95", "ovs_interfaceid": "216a398c-956e-4115-ae6d-b045d946831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.802774] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096515, 'name': Rename_Task, 'duration_secs': 0.170846} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.803104] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.803396] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ebae181-2901-4e8c-9829-47acaec18973 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.812031] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1096.812031] env[69927]: value = "task-4096516" [ 1096.812031] env[69927]: _type = "Task" [ 1096.812031] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.823212] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.865797] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.869354] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.271s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.871473] env[69927]: INFO nova.compute.claims [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1096.892719] env[69927]: DEBUG nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1096.901773] env[69927]: INFO nova.scheduler.client.report [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Deleted allocations for instance c6f166c7-538f-4c8a-9500-48319c694ea0 [ 1096.949605] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1096.949992] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1096.950455] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1096.950761] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1096.950955] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1096.951136] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1096.951548] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1096.951836] env[69927]: DEBUG nova.virt.hardware [None 
req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1096.952150] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1096.952462] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1096.952755] env[69927]: DEBUG nova.virt.hardware [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1096.954416] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07a4c73-33fb-4f0c-a405-480655259bb4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.966997] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e471e9-9841-48db-901e-2ba86eb7f6eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.113884] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.114242] env[69927]: DEBUG nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Instance network_info: |[{"id": "216a398c-956e-4115-ae6d-b045d946831b", "address": "fa:16:3e:84:a5:05", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216a398c-95", "ovs_interfaceid": "216a398c-956e-4115-ae6d-b045d946831b", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1097.114689] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:a5:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '216a398c-956e-4115-ae6d-b045d946831b', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1097.130583] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1097.132028] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1097.133041] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b6cd59e-9cd1-4bbd-92fd-eb64baf8e3ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.163723] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1097.163723] env[69927]: value = "task-4096517" [ 1097.163723] env[69927]: _type = "Task" [ 1097.163723] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.172529] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096517, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.329425] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096516, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.363237] env[69927]: DEBUG nova.network.neutron [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Successfully updated port: 6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1097.413237] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10dab88e-538a-4d02-ad2a-0d874944f04c tempest-ServerMetadataNegativeTestJSON-1540832771 tempest-ServerMetadataNegativeTestJSON-1540832771-project-member] Lock "c6f166c7-538f-4c8a-9500-48319c694ea0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.645s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.674264] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096517, 'name': CreateVM_Task, 'duration_secs': 0.462717} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.674457] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.675190] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.675359] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.675708] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1097.675963] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5dbe607-d543-449c-b150-283dc78ba495 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.681329] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1097.681329] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f850c6-001c-f26a-f784-a94492a93b4b" [ 1097.681329] env[69927]: _type = "Task" [ 1097.681329] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.687603] env[69927]: DEBUG nova.compute.manager [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Received event network-changed-216a398c-956e-4115-ae6d-b045d946831b {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1097.687816] env[69927]: DEBUG nova.compute.manager [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Refreshing instance network info cache due to event network-changed-216a398c-956e-4115-ae6d-b045d946831b. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1097.688158] env[69927]: DEBUG oslo_concurrency.lockutils [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] Acquiring lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.688339] env[69927]: DEBUG oslo_concurrency.lockutils [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] Acquired lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.688445] env[69927]: DEBUG nova.network.neutron [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Refreshing network info cache for port 216a398c-956e-4115-ae6d-b045d946831b {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.696432] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f850c6-001c-f26a-f784-a94492a93b4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.822548] env[69927]: DEBUG oslo_vmware.api [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096516, 'name': PowerOnVM_Task, 'duration_secs': 0.57454} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.822829] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.823073] env[69927]: INFO nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Took 8.06 seconds to spawn the instance on the hypervisor. 
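Every vCenter operation in this run (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task, CopyVirtualDisk_Task, and the SearchDatastore_Task session tasks) leaves the same three-part trace: a "Waiting for the task" line from wait_for_task at oslo_vmware/api.py:397, a "progress is N%" line per poll from _poll_task at api.py:434, and a closing "completed successfully" line with duration_secs at api.py:444. The sketch below shows that polling pattern; get_task_info() is a hypothetical callable standing in for the PropertyCollector round-trips the real session performs, and the interval, state names, and TaskFailure type are illustrative assumptions rather than the oslo.vmware API.

    import time
    from types import SimpleNamespace


    class TaskFailure(Exception):
        """Raised when the polled task ends in the 'error' state."""


    def wait_for_task(task_id, get_task_info, interval=0.5, log=print):
        # Poll the task until it reports success or error, logging progress on
        # every round, which is what produces the "progress is N%." lines.
        log("Waiting for the task: %s to complete." % task_id)
        while True:
            info = get_task_info(task_id)
            if info.state == "success":
                log("Task: %s completed successfully." % task_id)
                return info
            if info.state == "error":
                raise TaskFailure("Task %s failed: %s" % (task_id, info.error))
            log("Task: %s progress is %d%%." % (task_id, info.progress))
            time.sleep(interval)


    if __name__ == "__main__":
        # Fake task states mirroring task-4096516 above: 0%, 94%, then success.
        states = iter([
            SimpleNamespace(state="running", progress=0, error=None),
            SimpleNamespace(state="running", progress=94, error=None),
            SimpleNamespace(state="success", progress=100, error=None),
        ])
        wait_for_task("task-4096516", lambda _task: next(states), interval=0.0)

The real session drives this through a looping call rather than a bare while loop, but the visible log shape — one wait line, a progress line per poll, then the completion line with its measured duration — follows directly from this pattern.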
[ 1097.823270] env[69927]: DEBUG nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1097.824066] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13877dc-fcde-446d-baab-ca6222daf458 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.869463] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-cba314de-644e-451e-8ecc-2e209d74bbce" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.869548] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-cba314de-644e-451e-8ecc-2e209d74bbce" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.869704] env[69927]: DEBUG nova.network.neutron [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.198771] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f850c6-001c-f26a-f784-a94492a93b4b, 'name': SearchDatastore_Task, 'duration_secs': 0.014331} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.199713] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.200455] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1098.200455] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.200835] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.200969] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1098.207232] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43f2e262-664c-472b-b885-45fd90243fbf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.218775] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c50aa72-fcd5-41d4-80b8-f52d09b0cbd9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.223544] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1098.223735] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1098.225508] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-181bfff9-0f75-42d5-b30a-3bbce045ca0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.233316] env[69927]: DEBUG nova.network.neutron [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Successfully updated port: 5e16c6cd-8c81-4e94-97f0-79a4834e6c6f {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.237062] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57520a4-58c9-48ad-b350-2d8c8cbd224e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.243605] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1098.243605] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52645645-6c69-c984-fbd2-4842382e897d" [ 1098.243605] env[69927]: _type = "Task" [ 1098.243605] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.278780] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7caa35-0fe0-48b3-8346-9dd291a124bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.285948] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52645645-6c69-c984-fbd2-4842382e897d, 'name': SearchDatastore_Task, 'duration_secs': 0.016882} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.287166] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32a19c16-03c2-4e74-9e09-0d66472e14a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.293993] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce42d9ac-ace6-4e84-ace1-d9dab20f9aba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.301420] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1098.301420] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5285c6bf-b072-3dc5-6518-d29e50b16ed2" [ 1098.301420] env[69927]: _type = "Task" [ 1098.301420] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.315097] env[69927]: DEBUG nova.compute.provider_tree [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.324834] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5285c6bf-b072-3dc5-6518-d29e50b16ed2, 'name': SearchDatastore_Task, 'duration_secs': 0.013734} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.325162] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.325448] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9aa0a285-66e4-4792-bbe9-a62f76666ec6/9aa0a285-66e4-4792-bbe9-a62f76666ec6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1098.325870] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7be248e-02f3-40e4-be83-fbb82181e421 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.335076] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1098.335076] env[69927]: value = "task-4096518" [ 1098.335076] env[69927]: _type = "Task" [ 1098.335076] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.350090] env[69927]: INFO nova.compute.manager [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Took 36.62 seconds to build instance. [ 1098.356313] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096518, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.367941] env[69927]: DEBUG nova.compute.manager [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Received event network-vif-plugged-6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.368236] env[69927]: DEBUG oslo_concurrency.lockutils [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] Acquiring lock "cba314de-644e-451e-8ecc-2e209d74bbce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.368393] env[69927]: DEBUG oslo_concurrency.lockutils [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] Lock "cba314de-644e-451e-8ecc-2e209d74bbce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.368581] env[69927]: DEBUG oslo_concurrency.lockutils [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] Lock "cba314de-644e-451e-8ecc-2e209d74bbce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.368847] env[69927]: DEBUG nova.compute.manager [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] No waiting events found dispatching network-vif-plugged-6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1098.369053] env[69927]: WARNING nova.compute.manager [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Received unexpected event network-vif-plugged-6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd for instance with vm_state building and task_state spawning. [ 1098.369223] env[69927]: DEBUG nova.compute.manager [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Received event network-changed-6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.369391] env[69927]: DEBUG nova.compute.manager [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Refreshing instance network info cache due to event network-changed-6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1098.369565] env[69927]: DEBUG oslo_concurrency.lockutils [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] Acquiring lock "refresh_cache-cba314de-644e-451e-8ecc-2e209d74bbce" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.413368] env[69927]: DEBUG nova.network.neutron [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1098.594016] env[69927]: DEBUG nova.network.neutron [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updated VIF entry in instance network info cache for port 216a398c-956e-4115-ae6d-b045d946831b. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.594439] env[69927]: DEBUG nova.network.neutron [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance_info_cache with network_info: [{"id": "216a398c-956e-4115-ae6d-b045d946831b", "address": "fa:16:3e:84:a5:05", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216a398c-95", "ovs_interfaceid": "216a398c-956e-4115-ae6d-b045d946831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.603544] env[69927]: DEBUG nova.network.neutron [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Updating instance_info_cache with network_info: [{"id": "6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd", "address": "fa:16:3e:d3:52:f5", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be5b0c0-f0", "ovs_interfaceid": "6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.738491] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.738491] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.738491] env[69927]: DEBUG nova.network.neutron [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.819871] env[69927]: DEBUG nova.scheduler.client.report [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.852916] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096518, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.859788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a8ad89bb-194c-4a9e-af89-3c6c937ef357 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.148s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.097334] env[69927]: DEBUG oslo_concurrency.lockutils [req-52dcd0b3-515b-4aa4-969f-13d0f45c3b74 req-f876ec01-3290-48c1-b7eb-5a4a15b3e097 service nova] Releasing lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.103196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-cba314de-644e-451e-8ecc-2e209d74bbce" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.103546] env[69927]: DEBUG nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Instance network_info: |[{"id": "6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd", "address": "fa:16:3e:d3:52:f5", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be5b0c0-f0", "ovs_interfaceid": "6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1099.103856] env[69927]: DEBUG oslo_concurrency.lockutils [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] Acquired lock "refresh_cache-cba314de-644e-451e-8ecc-2e209d74bbce" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.104051] env[69927]: DEBUG nova.network.neutron [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Refreshing network info cache for port 6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1099.105578] env[69927]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:52:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1099.115254] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1099.116899] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1099.117314] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be206f3c-39a5-450d-85b6-68e81621b640 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.143396] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1099.143396] env[69927]: value = "task-4096519" [ 1099.143396] env[69927]: _type = "Task" [ 1099.143396] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.154068] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096519, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.288658] env[69927]: WARNING nova.network.neutron [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] b8b342c3-e0d7-4186-9541-03e865142f8a already exists in list: networks containing: ['b8b342c3-e0d7-4186-9541-03e865142f8a']. ignoring it [ 1099.328129] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.328864] env[69927]: DEBUG nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1099.333095] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.009s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.333391] env[69927]: DEBUG nova.objects.instance [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lazy-loading 'resources' on Instance uuid 44e81156-b0c7-4f68-9732-b39f41ebcd4b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.364339] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63684} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.364655] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9aa0a285-66e4-4792-bbe9-a62f76666ec6/9aa0a285-66e4-4792-bbe9-a62f76666ec6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1099.364924] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1099.366036] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-286dc6dd-ae7d-402b-98e5-e35bafd222eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.373193] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1099.373193] env[69927]: value = "task-4096520" [ 1099.373193] env[69927]: _type = "Task" [ 1099.373193] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.384085] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.656292] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096519, 'name': CreateVM_Task, 'duration_secs': 0.43011} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.656734] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1099.658952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.659559] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.659992] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1099.660352] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2cbdf37-9d23-48ae-8299-51901d7d517c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.666237] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1099.666237] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c31d54-b9a5-4bd3-5010-f8d1711bf4d3" [ 1099.666237] env[69927]: _type = "Task" [ 1099.666237] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.681781] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c31d54-b9a5-4bd3-5010-f8d1711bf4d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.739069] env[69927]: DEBUG nova.network.neutron [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "address": "fa:16:3e:b3:78:b9", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e16c6cd-8c", "ovs_interfaceid": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.839412] env[69927]: DEBUG nova.compute.utils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1099.841280] env[69927]: DEBUG nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Allocating IP information in the background. 
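The instance_info_cache entries above are plain JSON-style port records: VIF id, MAC address, network with subnets, fixed IPs and any floating IPs, plus OVS binding details. Purely as a sketch of that structure (trimmed from the entry for port 31239db7-86bd-4d24-b54f-414bd1d5a3d1 above, and not Nova code), the addresses can be pulled out like this:

    # Illustrative walk over a trimmed network_info entry from the cache above.
    network_info = [{
        "id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1",
        "address": "fa:16:3e:2d:be:d8",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.10", "type": "fixed",
                     "floating_ips": [{"address": "10.180.180.208",
                                       "type": "floating"}]}],
        }]},
    }]
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], ip["address"], "floating:", floats or "-")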
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1099.842037] env[69927]: DEBUG nova.network.neutron [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1099.891582] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118071} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.894575] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1099.895752] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb1de8c-e9ed-4d25-9d92-524a4a95b66d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.921032] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 9aa0a285-66e4-4792-bbe9-a62f76666ec6/9aa0a285-66e4-4792-bbe9-a62f76666ec6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.922533] env[69927]: DEBUG nova.policy [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20629f26389d40199a4c5d5d2312dbae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2fb1fc4c3ae41a5b331c6be7973eb72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1099.926575] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c0704ee-230e-4fa5-8be5-3b028304378f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.942816] env[69927]: DEBUG nova.network.neutron [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Updated VIF entry in instance network info cache for port 6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.943698] env[69927]: DEBUG nova.network.neutron [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Updating instance_info_cache with network_info: [{"id": "6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd", "address": "fa:16:3e:d3:52:f5", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be5b0c0-f0", "ovs_interfaceid": "6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.952587] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1099.952587] env[69927]: value = "task-4096521" [ 1099.952587] env[69927]: _type = "Task" [ 1099.952587] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.969824] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.088315] env[69927]: DEBUG nova.compute.manager [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-vif-plugged-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1100.088562] env[69927]: DEBUG oslo_concurrency.lockutils [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.088930] env[69927]: DEBUG oslo_concurrency.lockutils [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.089147] env[69927]: DEBUG oslo_concurrency.lockutils [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.089324] env[69927]: DEBUG nova.compute.manager [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] No waiting events found dispatching network-vif-plugged-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1100.089624] env[69927]: WARNING nova.compute.manager [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received unexpected event network-vif-plugged-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f for instance with vm_state active and task_state None. [ 1100.089700] env[69927]: DEBUG nova.compute.manager [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-changed-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1100.089797] env[69927]: DEBUG nova.compute.manager [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing instance network info cache due to event network-changed-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1100.089974] env[69927]: DEBUG oslo_concurrency.lockutils [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.178433] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c31d54-b9a5-4bd3-5010-f8d1711bf4d3, 'name': SearchDatastore_Task, 'duration_secs': 0.013768} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.181722] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.182038] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1100.182417] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.182567] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.182803] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1100.183395] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-419f5716-3333-4702-949e-3ff0959272d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.193707] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1100.193859] env[69927]: 
DEBUG nova.virt.vmwareapi.vmops [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1100.194653] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-120a27ea-be15-4a3a-a8b1-e17167b4b4d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.202418] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1100.202418] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fcb235-8bf0-2d2e-52c5-c6bb9ec6f2f4" [ 1100.202418] env[69927]: _type = "Task" [ 1100.202418] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.212191] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fcb235-8bf0-2d2e-52c5-c6bb9ec6f2f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.242719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.243520] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.243733] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.244455] env[69927]: DEBUG oslo_concurrency.lockutils [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.244455] env[69927]: DEBUG nova.network.neutron [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing network info cache for port 5e16c6cd-8c81-4e94-97f0-79a4834e6c6f {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1100.246918] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ca0f0d-4163-4268-9142-a815420e6684 {{(pid=69927) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.253336] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40877aed-9b2a-453f-a47e-579577d7ee44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.277033] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4eeed4-cc3d-4993-a345-922d4e2ef102 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.280553] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1100.280844] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1100.281021] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1100.281215] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1100.281385] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1100.281534] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1100.281743] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1100.281929] 
env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1100.282556] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1100.282556] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1100.282556] env[69927]: DEBUG nova.virt.hardware [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1100.289379] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfiguring VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1100.291184] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6e542d6-795a-4ef6-9310-b847dcf57264 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.340793] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5c382a-0c01-46f3-9e3c-687160b55c7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.344922] env[69927]: DEBUG oslo_vmware.api [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1100.344922] env[69927]: value = "task-4096522" [ 1100.344922] env[69927]: _type = "Task" [ 1100.344922] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.345702] env[69927]: DEBUG nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Start building block device mappings for instance. 
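The nova.virt.hardware lines above trace CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 per dimension, so the only split of one vCPU is sockets=1, cores=1, threads=1. A simplified sketch of that enumeration (an approximation under those assumptions, not the actual hardware.py logic):

    # Simplified illustration: enumerate sockets*cores*threads splits that
    # exactly cover the requested vCPU count within the given maxima.
    import itertools
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        limit = lambda m: range(1, min(vcpus, m) + 1)
        return [VirtCPUTopology(s, c, t)
                for s, c, t in itertools.product(limit(max_sockets),
                                                 limit(max_cores),
                                                 limit(max_threads))
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]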
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1100.358917] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b27503-6d27-41be-be9f-0290207a4548 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.367104] env[69927]: DEBUG oslo_vmware.api [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.380909] env[69927]: DEBUG nova.compute.provider_tree [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.439348] env[69927]: DEBUG nova.network.neutron [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Successfully created port: c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1100.447877] env[69927]: DEBUG oslo_concurrency.lockutils [req-5451b31f-7e07-4c2c-a626-a27d7470b729 req-255ee60f-fab2-4410-8028-30185765144c service nova] Releasing lock "refresh_cache-cba314de-644e-451e-8ecc-2e209d74bbce" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.465542] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.713788] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fcb235-8bf0-2d2e-52c5-c6bb9ec6f2f4, 'name': SearchDatastore_Task, 'duration_secs': 0.016142} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.714675] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb544168-01ce-4486-acc5-d7b80fa3802b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.720834] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1100.720834] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b7104-7633-1c7d-17c9-f2c282ffee45" [ 1100.720834] env[69927]: _type = "Task" [ 1100.720834] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.729706] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b7104-7633-1c7d-17c9-f2c282ffee45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.855983] env[69927]: DEBUG oslo_vmware.api [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.884687] env[69927]: DEBUG nova.scheduler.client.report [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.965684] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.992548] env[69927]: DEBUG nova.network.neutron [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updated VIF entry in instance network info cache for port 5e16c6cd-8c81-4e94-97f0-79a4834e6c6f. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1100.993142] env[69927]: DEBUG nova.network.neutron [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "address": "fa:16:3e:b3:78:b9", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e16c6cd-8c", "ovs_interfaceid": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.234289] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b7104-7633-1c7d-17c9-f2c282ffee45, 'name': SearchDatastore_Task, 'duration_secs': 0.017473} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.234601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.235248] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] cba314de-644e-451e-8ecc-2e209d74bbce/cba314de-644e-451e-8ecc-2e209d74bbce.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1101.235248] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e8873b2-f8b3-45a2-a4b5-bce25e128f33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.243366] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1101.243366] env[69927]: value = "task-4096523" [ 1101.243366] env[69927]: _type = "Task" [ 1101.243366] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.253384] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096523, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.356810] env[69927]: DEBUG oslo_vmware.api [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096522, 'name': ReconfigVM_Task, 'duration_secs': 0.846212} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.357361] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.357581] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfigured VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1101.362849] env[69927]: DEBUG nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1101.390163] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.395278] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.395610] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.395822] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.395994] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1101.396160] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.396316] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.396533] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.396723] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.396897] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.397083] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.397280] env[69927]: DEBUG nova.virt.hardware [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.398640] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.283s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.400243] env[69927]: INFO nova.compute.claims [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.404717] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98532c64-382b-42af-8309-e978cf7dc30d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.414333] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7979ac00-a110-4fe2-b364-54b6b31129b1 
{{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.431722] env[69927]: INFO nova.scheduler.client.report [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted allocations for instance 44e81156-b0c7-4f68-9732-b39f41ebcd4b [ 1101.466280] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.497286] env[69927]: DEBUG oslo_concurrency.lockutils [req-296e7f13-8380-43cc-9f8d-018a7e5b3f19 req-5cc2601b-773b-4d03-bd7e-cb82183a30c1 service nova] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.644796] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c9b62-5d02-f1d0-df4d-28e8a82c10d7/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1101.645871] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af74ad2-bc6f-4677-b91a-43c7135839ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.655152] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c9b62-5d02-f1d0-df4d-28e8a82c10d7/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1101.655380] env[69927]: ERROR oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c9b62-5d02-f1d0-df4d-28e8a82c10d7/disk-0.vmdk due to incomplete transfer. [ 1101.656299] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d701b386-1dd4-4d96-9ae7-29875f045445 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.664874] env[69927]: DEBUG oslo_vmware.rw_handles [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c9b62-5d02-f1d0-df4d-28e8a82c10d7/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1101.665118] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Uploaded image 87a22900-db4f-4f48-b2cd-ab99a8178619 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1101.667589] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1101.667974] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3c33a954-f464-4c49-970b-6fe973e90988 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.676022] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1101.676022] env[69927]: value = "task-4096524" [ 1101.676022] env[69927]: _type = "Task" [ 1101.676022] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.687468] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096524, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.754403] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096523, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.864035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0800302e-7454-48bd-8c75-d88c9b9203a8 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.034s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.940661] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2cef688f-5e4e-45f7-aa96-d39c85e94278 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "44e81156-b0c7-4f68-9732-b39f41ebcd4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.275s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.968672] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.188176] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096524, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.254356] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096523, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639954} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.254638] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] cba314de-644e-451e-8ecc-2e209d74bbce/cba314de-644e-451e-8ecc-2e209d74bbce.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1102.254874] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1102.255220] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00b6d764-7d6b-4f2a-b195-3ccfc58e3bb9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.262485] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1102.262485] env[69927]: value = "task-4096525" [ 1102.262485] env[69927]: _type = "Task" [ 1102.262485] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.271229] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096525, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.358641] env[69927]: DEBUG nova.network.neutron [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Successfully updated port: c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1102.467809] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.669735] env[69927]: DEBUG nova.compute.manager [req-c978f9ee-3b91-430b-81c4-381c64176f74 req-8767ca29-894b-439c-a46b-a5eb5b1e5d1d service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Received event network-vif-plugged-c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.670046] env[69927]: DEBUG oslo_concurrency.lockutils [req-c978f9ee-3b91-430b-81c4-381c64176f74 req-8767ca29-894b-439c-a46b-a5eb5b1e5d1d service nova] Acquiring lock "d9347f31-b908-4561-9b57-1ea79b762168-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.670455] env[69927]: DEBUG oslo_concurrency.lockutils [req-c978f9ee-3b91-430b-81c4-381c64176f74 req-8767ca29-894b-439c-a46b-a5eb5b1e5d1d service nova] Lock "d9347f31-b908-4561-9b57-1ea79b762168-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.670455] env[69927]: DEBUG oslo_concurrency.lockutils [req-c978f9ee-3b91-430b-81c4-381c64176f74 req-8767ca29-894b-439c-a46b-a5eb5b1e5d1d service nova] Lock "d9347f31-b908-4561-9b57-1ea79b762168-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.670611] env[69927]: DEBUG nova.compute.manager [req-c978f9ee-3b91-430b-81c4-381c64176f74 req-8767ca29-894b-439c-a46b-a5eb5b1e5d1d service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] No waiting events found dispatching network-vif-plugged-c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1102.670775] env[69927]: WARNING nova.compute.manager [req-c978f9ee-3b91-430b-81c4-381c64176f74 req-8767ca29-894b-439c-a46b-a5eb5b1e5d1d service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Received unexpected event network-vif-plugged-c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 for instance with vm_state building and task_state spawning. [ 1102.674023] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71abdd6-fd52-43ba-8886-d36043e133d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.683865] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfaeb2f-7ff7-4369-9c54-54c007e131be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.690114] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096524, 'name': Destroy_Task, 'duration_secs': 1.002893} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.690701] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Destroyed the VM [ 1102.690948] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1102.691510] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f7832a24-cd8a-4094-9113-726abcbe5ca8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.722078] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff87b5e-9d2f-4109-be82-9e3d84059bbe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.724792] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1102.724792] env[69927]: value = "task-4096526" [ 1102.724792] env[69927]: _type = "Task" [ 1102.724792] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.731428] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6982f4bf-6774-4d3b-ad81-32b30fdc2786 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.739553] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096526, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.754057] env[69927]: DEBUG nova.compute.provider_tree [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.772745] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.229934} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.773078] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1102.773749] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64536b33-8cce-488d-9cae-bc2d5c0a87e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.797123] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] cba314de-644e-451e-8ecc-2e209d74bbce/cba314de-644e-451e-8ecc-2e209d74bbce.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.797439] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ff7e60c-fe80-4875-96a1-7cb6a571c815 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.818498] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1102.818498] env[69927]: value = "task-4096527" [ 1102.818498] env[69927]: _type = "Task" [ 1102.818498] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.827065] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096527, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.863465] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "refresh_cache-d9347f31-b908-4561-9b57-1ea79b762168" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.863710] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "refresh_cache-d9347f31-b908-4561-9b57-1ea79b762168" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.863794] env[69927]: DEBUG nova.network.neutron [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.969333] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.235429] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096526, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.258189] env[69927]: DEBUG nova.scheduler.client.report [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.330141] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096527, 'name': ReconfigVM_Task, 'duration_secs': 0.316948} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.330474] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Reconfigured VM instance instance-00000059 to attach disk [datastore2] cba314de-644e-451e-8ecc-2e209d74bbce/cba314de-644e-451e-8ecc-2e209d74bbce.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.331382] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3469ed9d-4ecb-4e75-8719-cf8f37b6083d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.339849] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1103.339849] env[69927]: value = "task-4096528" [ 1103.339849] env[69927]: _type = "Task" [ 1103.339849] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.350315] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096528, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.431108] env[69927]: DEBUG nova.network.neutron [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1103.471708] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.741831] env[69927]: DEBUG oslo_vmware.api [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096526, 'name': RemoveSnapshot_Task, 'duration_secs': 0.625245} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.742341] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1103.742599] env[69927]: INFO nova.compute.manager [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Took 15.04 seconds to snapshot the instance on the hypervisor. 
[ 1103.752035] env[69927]: DEBUG nova.network.neutron [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Updating instance_info_cache with network_info: [{"id": "c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1", "address": "fa:16:3e:40:aa:c5", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8607f3f-9b", "ovs_interfaceid": "c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.767307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.767909] env[69927]: DEBUG nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1103.770864] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.373s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.771172] env[69927]: DEBUG nova.objects.instance [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lazy-loading 'resources' on Instance uuid dd4c3963-aa58-49f2-b675-9863ff13bddf {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.850398] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096528, 'name': Rename_Task, 'duration_secs': 0.332738} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.850739] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.850971] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8cb47e0-42c2-4dc0-80f8-ab2770d532bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.859608] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1103.859608] env[69927]: value = "task-4096529" [ 1103.859608] env[69927]: _type = "Task" [ 1103.859608] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.868895] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096529, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.923716] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.924144] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.924379] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.924571] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.924746] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 
tempest-ServerTagsTestJSON-1641259867-project-member] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.927198] env[69927]: INFO nova.compute.manager [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Terminating instance [ 1103.969386] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.256304] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "refresh_cache-d9347f31-b908-4561-9b57-1ea79b762168" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.256666] env[69927]: DEBUG nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Instance network_info: |[{"id": "c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1", "address": "fa:16:3e:40:aa:c5", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8607f3f-9b", "ovs_interfaceid": "c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1104.257146] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:aa:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1104.264891] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1104.265085] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1104.265333] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb75f55d-59cb-4cb6-b16c-c79e7592a5d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.284471] env[69927]: DEBUG nova.compute.utils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1104.288473] env[69927]: DEBUG nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1104.288660] env[69927]: DEBUG nova.network.neutron [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1104.296515] env[69927]: DEBUG nova.compute.manager [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Found 3 images (rotation: 2) {{(pid=69927) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1104.296765] env[69927]: DEBUG nova.compute.manager [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Rotating out 1 backups {{(pid=69927) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1104.296889] env[69927]: DEBUG nova.compute.manager [None req-45603c30-f0fb-4f48-b4ac-2cb647aeb9da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleting image f57cf18a-f065-4e7f-a53c-8cc53c2f655f {{(pid=69927) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1104.300406] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1104.300406] env[69927]: value = "task-4096530" [ 1104.300406] env[69927]: _type = "Task" [ 1104.300406] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.310881] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096530, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.325198] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.325380] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.325759] env[69927]: DEBUG nova.objects.instance [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'flavor' on Instance uuid 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.345902] env[69927]: DEBUG nova.policy [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '482f17df9f874808b99defabed52bc8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fa2f3f0abc7474196dfbee4f8c09d3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1104.372125] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096529, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.435689] env[69927]: DEBUG nova.compute.manager [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1104.436041] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.437530] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c342bc41-f410-4fc8-a40f-bdd9427ee598 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.449173] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.449173] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76b274c3-06e3-4ce9-83f0-4add185cc945 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.465504] env[69927]: DEBUG oslo_vmware.api [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1104.465504] env[69927]: value = "task-4096531" [ 1104.465504] env[69927]: _type = "Task" [ 1104.465504] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.478910] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.486584] env[69927]: DEBUG oslo_vmware.api [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096531, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.619807] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b95a29-bd18-478c-94e8-529dd4bbeece {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.629212] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7304fe-1685-44e3-b894-497733d2a667 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.662093] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affe95ed-79ba-45d4-bbfa-ffef477b4165 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.671965] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1122ae3-cb75-41be-aef9-eb10301596ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.687478] env[69927]: DEBUG nova.compute.provider_tree [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.765399] env[69927]: DEBUG nova.compute.manager [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Received event network-changed-c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1104.765594] env[69927]: DEBUG nova.compute.manager [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Refreshing instance network info cache due to event network-changed-c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1104.765810] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] Acquiring lock "refresh_cache-d9347f31-b908-4561-9b57-1ea79b762168" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.765962] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] Acquired lock "refresh_cache-d9347f31-b908-4561-9b57-1ea79b762168" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.766136] env[69927]: DEBUG nova.network.neutron [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Refreshing network info cache for port c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.771667] env[69927]: DEBUG nova.network.neutron [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Successfully created port: 7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1104.792135] env[69927]: DEBUG nova.compute.utils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1104.812694] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096530, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.872969] env[69927]: DEBUG oslo_vmware.api [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096529, 'name': PowerOnVM_Task, 'duration_secs': 0.635889} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.873268] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1104.873474] env[69927]: INFO nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Took 7.98 seconds to spawn the instance on the hypervisor. 
[ 1104.873656] env[69927]: DEBUG nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1104.874535] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d95fd1-ac24-4867-8748-c6fb4e122a71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.935281] env[69927]: DEBUG nova.objects.instance [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'pci_requests' on Instance uuid 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.973347] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096521, 'name': ReconfigVM_Task, 'duration_secs': 4.662475} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.974071] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 9aa0a285-66e4-4792-bbe9-a62f76666ec6/9aa0a285-66e4-4792-bbe9-a62f76666ec6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.975852] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41b23092-f3fe-42db-b982-af48ec9362cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.980650] env[69927]: DEBUG oslo_vmware.api [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096531, 'name': PowerOffVM_Task, 'duration_secs': 0.249501} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.981253] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.981427] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.981675] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f244afe2-81cf-4707-93a7-afd55fbbf83f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.986046] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1104.986046] env[69927]: value = "task-4096532" [ 1104.986046] env[69927]: _type = "Task" [ 1104.986046] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.994719] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096532, 'name': Rename_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.056409] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.056653] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.056998] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Deleting the datastore file [datastore1] 2ae5fcf7-3111-4e80-80b0-f9c1cece1001 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.057244] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9a1f6f6-af63-409d-91ab-2bbe7fb533dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.065395] env[69927]: DEBUG oslo_vmware.api [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for the task: (returnval){ [ 1105.065395] env[69927]: value = "task-4096534" [ 1105.065395] env[69927]: _type = "Task" [ 1105.065395] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.079370] env[69927]: DEBUG oslo_vmware.api [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096534, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.190787] env[69927]: DEBUG nova.scheduler.client.report [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.294536] env[69927]: DEBUG nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1105.312668] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096530, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.392326] env[69927]: INFO nova.compute.manager [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Took 38.30 seconds to build instance. [ 1105.439168] env[69927]: DEBUG nova.objects.base [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Object Instance<20ac32b7-51fc-40bf-a667-2aeb6c8c7648> lazy-loaded attributes: flavor,pci_requests {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1105.439524] env[69927]: DEBUG nova.network.neutron [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1105.499679] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096532, 'name': Rename_Task, 'duration_secs': 0.180381} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.499936] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.500209] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc08fdf2-393c-4e61-9f96-9c0f3a76530e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.506941] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1105.506941] env[69927]: value = "task-4096535" [ 1105.506941] env[69927]: _type = "Task" [ 1105.506941] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.515902] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096535, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.537889] env[69927]: DEBUG nova.policy [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1105.540490] env[69927]: DEBUG nova.network.neutron [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Updated VIF entry in instance network info cache for port c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1105.540614] env[69927]: DEBUG nova.network.neutron [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Updating instance_info_cache with network_info: [{"id": "c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1", "address": "fa:16:3e:40:aa:c5", "network": {"id": "045356e7-ce71-4c33-9121-8655a915fc1d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1277828099-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2fb1fc4c3ae41a5b331c6be7973eb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8607f3f-9b", "ovs_interfaceid": "c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.576828] env[69927]: DEBUG oslo_vmware.api [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Task: {'id': task-4096534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139296} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.577170] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.577413] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.577594] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.577914] env[69927]: INFO nova.compute.manager [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1105.578070] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.578271] env[69927]: DEBUG nova.compute.manager [-] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.578366] env[69927]: DEBUG nova.network.neutron [-] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.696500] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.926s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.700262] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 20.299s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.738111] env[69927]: INFO nova.scheduler.client.report [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Deleted allocations for instance dd4c3963-aa58-49f2-b675-9863ff13bddf [ 1105.814017] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096530, 'name': CreateVM_Task, 'duration_secs': 1.424147} completed 
successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.814196] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.815254] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.815430] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.815821] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1105.816114] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f8a9a7-d256-463e-a0e3-1909e295c3e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.822881] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1105.822881] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52db2981-6999-24db-d114-2088ae20969c" [ 1105.822881] env[69927]: _type = "Task" [ 1105.822881] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.836014] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52db2981-6999-24db-d114-2088ae20969c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.893287] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e9470d1c-25b0-49a5-929e-d9ee378efe5d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.822s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.018764] env[69927]: DEBUG oslo_vmware.api [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096535, 'name': PowerOnVM_Task, 'duration_secs': 0.478445} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.019031] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.019248] env[69927]: INFO nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Took 11.66 seconds to spawn the instance on the hypervisor. [ 1106.019411] env[69927]: DEBUG nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1106.020213] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45fb4c9-e3ef-484c-8d47-46df3d33dc7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.044141] env[69927]: DEBUG oslo_concurrency.lockutils [req-f1db3710-f441-46a4-abd7-7a1c5d14f9d1 req-f7eacad6-f7f0-4234-b62b-aafedbb23f36 service nova] Releasing lock "refresh_cache-d9347f31-b908-4561-9b57-1ea79b762168" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.248819] env[69927]: DEBUG oslo_concurrency.lockutils [None req-88ba8a65-bc86-4e44-81be-187b22606691 tempest-InstanceActionsV221TestJSON-1577346487 tempest-InstanceActionsV221TestJSON-1577346487-project-member] Lock "dd4c3963-aa58-49f2-b675-9863ff13bddf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.304s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.305397] env[69927]: DEBUG nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1106.338767] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52db2981-6999-24db-d114-2088ae20969c, 'name': SearchDatastore_Task, 'duration_secs': 0.01031} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.340455] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:42:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='167809972',id=35,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1697912576',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1106.340751] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.340970] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1106.341257] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.341464] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1106.341672] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1106.341941] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1106.342219] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 
tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1106.342381] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1106.342617] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1106.342854] env[69927]: DEBUG nova.virt.hardware [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1106.343264] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.343566] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.343822] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.343969] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.344229] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.345065] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd8261d-d8cf-41ed-940f-4a9a9a9c1a27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.347754] env[69927]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf774114-646a-4c12-8250-e5083204efcd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.357256] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb03ea5f-c3fb-4507-a13a-5b18eaa5f4f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.364117] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.364475] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.365963] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fa72eb5-c044-4bf1-b8fe-7879e9fb3988 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.382338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "cba314de-644e-451e-8ecc-2e209d74bbce" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.382338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.382338] env[69927]: DEBUG nova.compute.manager [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1106.383540] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8bc7f0-9ca5-472b-a22e-bc57e9f64526 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.387404] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1106.387404] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521e0ae5-6d53-e895-057f-342442a642cd" [ 1106.387404] env[69927]: _type = "Task" [ 1106.387404] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.392459] env[69927]: DEBUG nova.compute.manager [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1106.392540] env[69927]: DEBUG nova.objects.instance [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'flavor' on Instance uuid cba314de-644e-451e-8ecc-2e209d74bbce {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1106.401984] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521e0ae5-6d53-e895-057f-342442a642cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.409274] env[69927]: DEBUG nova.compute.manager [req-6a7786ca-775f-4340-bb56-72e4e98b289a req-f74f0d37-43a0-44be-a66f-a43052090543 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Received event network-vif-deleted-a1e7adcd-1321-49ab-a876-14b8e90db77c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.409463] env[69927]: INFO nova.compute.manager [req-6a7786ca-775f-4340-bb56-72e4e98b289a req-f74f0d37-43a0-44be-a66f-a43052090543 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Neutron deleted interface a1e7adcd-1321-49ab-a876-14b8e90db77c; detaching it from the instance and deleting it from the info cache [ 1106.409634] env[69927]: DEBUG nova.network.neutron [req-6a7786ca-775f-4340-bb56-72e4e98b289a req-f74f0d37-43a0-44be-a66f-a43052090543 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.503655] env[69927]: DEBUG nova.network.neutron [-] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.542063] env[69927]: DEBUG nova.network.neutron [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Successfully updated port: 7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1106.547655] env[69927]: INFO nova.compute.manager [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Took 39.83 seconds to build instance. 
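The entries above repeatedly show the oslo.vmware pattern of invoking a task-returning vCenter method (Rename_Task, PowerOnVM_Task, SearchDatastore_Task) and then blocking in wait_for_task while _poll_task reports progress. The following is only a minimal sketch of that pattern, not Nova's actual code; the host, credentials, and the vm_ref managed-object reference are placeholders.

from oslo_vmware import api

def power_on(session, vm_ref):
    # vm_ref: a VirtualMachine managed-object reference, obtained elsewhere
    # (e.g. via PropertyCollector queries like the RetrievePropertiesEx calls above).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Polls the task (the "_poll_task ... progress is N%" entries) until it
    # completes successfully or raises on error.
    return session.wait_for_task(task)

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',   # hypothetical endpoint and credentials
    api_retry_count=10,                        # retries on transient faults
    task_poll_interval=0.5)                    # seconds between poll cycles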
[ 1106.738429] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 256319c4-817d-4267-8531-a65f0f8cd0b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.738570] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 21b7b237-557e-4030-93bb-6b5ce417e53c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1106.738710] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1106.738832] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 4b7934f8-2c97-480b-8af7-f09f6819e2b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.738966] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1106.739098] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a2b1684f-82af-42fc-925e-db36f31cfe63 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.739233] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1106.739361] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.739476] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.739589] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.739711] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1106.739827] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 2ae5fcf7-3111-4e80-80b0-f9c1cece1001 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.739939] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9aa0a285-66e4-4792-bbe9-a62f76666ec6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.740141] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cba314de-644e-451e-8ecc-2e209d74bbce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.740270] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance d9347f31-b908-4561-9b57-1ea79b762168 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.740384] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 406828cc-c6aa-4686-827d-c7c8e28ffb8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1106.898842] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521e0ae5-6d53-e895-057f-342442a642cd, 'name': SearchDatastore_Task, 'duration_secs': 0.013545} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.899812] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd529cce-330e-4f31-9d53-55c787e2dc2f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.906412] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1106.906412] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f34b7-dfbf-bd96-63b1-16d0a885f24b" [ 1106.906412] env[69927]: _type = "Task" [ 1106.906412] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.912596] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13ee420e-8e97-428f-87fa-22b69fe76747 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.917654] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f34b7-dfbf-bd96-63b1-16d0a885f24b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.926115] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc67fe6-9ac9-4313-b07e-ba2df38ab142 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.960817] env[69927]: DEBUG nova.compute.manager [req-6a7786ca-775f-4340-bb56-72e4e98b289a req-f74f0d37-43a0-44be-a66f-a43052090543 service nova] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Detach interface failed, port_id=a1e7adcd-1321-49ab-a876-14b8e90db77c, reason: Instance 2ae5fcf7-3111-4e80-80b0-f9c1cece1001 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1107.007015] env[69927]: INFO nova.compute.manager [-] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Took 1.43 seconds to deallocate network for instance. 
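The 'Acquiring lock ... / acquired ... waited / "released" ... held' entries in this stretch come from oslo.concurrency's lockutils. A minimal sketch of the two forms visible here, assuming nothing beyond the library itself (the lock names are taken from the log; the function bodies are placeholders):

from oslo_concurrency import lockutils

# Decorator form: serialize callers on a named lock such as "compute_resources".
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # critical section

# Context-manager form, as used for the per-image datastore cache paths above.
with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>'):
    pass  # work on the cached image while holding the lock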
[ 1107.046262] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.046408] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.046566] env[69927]: DEBUG nova.network.neutron [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1107.051367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-52d99673-b8aa-4094-9058-c0ce52bc1343 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.339s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.219896] env[69927]: DEBUG nova.compute.manager [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Received event network-vif-plugged-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.219896] env[69927]: DEBUG oslo_concurrency.lockutils [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] Acquiring lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.219896] env[69927]: DEBUG oslo_concurrency.lockutils [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.219896] env[69927]: DEBUG oslo_concurrency.lockutils [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.219896] env[69927]: DEBUG nova.compute.manager [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] No waiting events found dispatching network-vif-plugged-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c 
{{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1107.219896] env[69927]: WARNING nova.compute.manager [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Received unexpected event network-vif-plugged-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c for instance with vm_state building and task_state spawning. [ 1107.219896] env[69927]: DEBUG nova.compute.manager [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Received event network-changed-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.221303] env[69927]: DEBUG nova.compute.manager [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Refreshing instance network info cache due to event network-changed-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1107.221303] env[69927]: DEBUG oslo_concurrency.lockutils [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] Acquiring lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.243397] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b422d5c9-f580-4d07-9d13-af307571bf48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1107.261143] env[69927]: DEBUG nova.network.neutron [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Successfully updated port: 0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.412788] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1107.412788] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9be35609-28fb-4a0d-83d3-5e92639ced4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.423093] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522f34b7-dfbf-bd96-63b1-16d0a885f24b, 'name': SearchDatastore_Task, 'duration_secs': 0.010085} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.424076] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.424347] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] d9347f31-b908-4561-9b57-1ea79b762168/d9347f31-b908-4561-9b57-1ea79b762168.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.424679] env[69927]: DEBUG oslo_vmware.api [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1107.424679] env[69927]: value = "task-4096536" [ 1107.424679] env[69927]: _type = "Task" [ 1107.424679] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.424863] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b33a5b70-bc92-41b9-959e-4b3ed6e82e3f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.435654] env[69927]: DEBUG oslo_vmware.api [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.437208] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1107.437208] env[69927]: value = "task-4096537" [ 1107.437208] env[69927]: _type = "Task" [ 1107.437208] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.445930] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096537, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.515094] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.593469] env[69927]: DEBUG nova.network.neutron [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1107.596742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.597146] env[69927]: DEBUG oslo_concurrency.lockutils [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.597424] env[69927]: DEBUG nova.compute.manager [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1107.598699] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffe4520-9804-4716-9386-742f125817f1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.611120] env[69927]: DEBUG nova.compute.manager [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1107.611835] env[69927]: DEBUG nova.objects.instance [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'flavor' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.746637] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1107.746772] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1107.746998] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1107.764718] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.764931] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.765138] env[69927]: DEBUG nova.network.neutron [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1107.769438] env[69927]: DEBUG nova.network.neutron [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Updating instance_info_cache with network_info: [{"id": "7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "address": "fa:16:3e:91:44:bb", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e497c-f9", "ovs_interfaceid": "7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1107.937630] env[69927]: DEBUG oslo_vmware.api [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096536, 'name': PowerOffVM_Task, 'duration_secs': 0.23943} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.940814] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1107.941016] env[69927]: DEBUG nova.compute.manager [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1107.945387] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9db37a6-3319-4b50-8c69-24f578a4f3f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.961995] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501366} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.962090] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] d9347f31-b908-4561-9b57-1ea79b762168/d9347f31-b908-4561-9b57-1ea79b762168.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1107.962408] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1107.965393] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c26da54-276b-498d-b99b-eb794e82240f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.974051] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1107.974051] env[69927]: value = "task-4096538" [ 1107.974051] env[69927]: _type = "Task" [ 1107.974051] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.983243] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096538, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.029508] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d48b289-673c-4654-8269-b44cc3d5e675 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.038594] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f987c9e5-38fa-43cd-b3a0-2e441335ebb3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.076208] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781d93f6-f998-4d21-aefb-1213af8c4e15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.088413] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37668cb2-77c1-477a-b918-457d72cf51c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.102070] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.277147] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Releasing lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.277740] env[69927]: DEBUG nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Instance network_info: |[{"id": "7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "address": "fa:16:3e:91:44:bb", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e497c-f9", "ovs_interfaceid": 
"7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1108.278392] env[69927]: DEBUG oslo_concurrency.lockutils [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] Acquired lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.278943] env[69927]: DEBUG nova.network.neutron [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Refreshing network info cache for port 7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.281107] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:44:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4a9e02-45f1-4afb-8abb-0de26b153086', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e7e497c-f90e-44b0-a0dc-e400b4b57c0c', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1108.289111] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.296688] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1108.298626] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71838316-dd1f-431a-86a8-04959fb1d689 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.325908] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1108.325908] env[69927]: value = "task-4096539" [ 1108.325908] env[69927]: _type = "Task" [ 1108.325908] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.329935] env[69927]: WARNING nova.network.neutron [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] b8b342c3-e0d7-4186-9541-03e865142f8a already exists in list: networks containing: ['b8b342c3-e0d7-4186-9541-03e865142f8a']. 
ignoring it [ 1108.330236] env[69927]: WARNING nova.network.neutron [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] b8b342c3-e0d7-4186-9541-03e865142f8a already exists in list: networks containing: ['b8b342c3-e0d7-4186-9541-03e865142f8a']. ignoring it [ 1108.338275] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096539, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.463578] env[69927]: DEBUG nova.compute.manager [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-vif-plugged-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.463720] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.463935] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.464112] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.464278] env[69927]: DEBUG nova.compute.manager [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] No waiting events found dispatching network-vif-plugged-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1108.464492] env[69927]: WARNING nova.compute.manager [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received unexpected event network-vif-plugged-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 for instance with vm_state active and task_state None. 
[ 1108.464680] env[69927]: DEBUG nova.compute.manager [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-changed-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.464858] env[69927]: DEBUG nova.compute.manager [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing instance network info cache due to event network-changed-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1108.465082] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.468389] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7487e216-920d-466f-985a-ffa24458fc79 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.086s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.487433] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096538, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.327118} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.491143] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1108.492420] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fbbe42-1b81-4ece-befb-9f3976d5c1b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.518188] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] d9347f31-b908-4561-9b57-1ea79b762168/d9347f31-b908-4561-9b57-1ea79b762168.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.521445] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1831646a-e6af-4048-aa9a-add398c6a14a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.545043] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1108.545043] env[69927]: value = "task-4096540" [ 1108.545043] env[69927]: _type = "Task" [ 1108.545043] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.555067] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096540, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.605523] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.618782] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1108.619128] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f1e9790-58ff-4984-9f51-0326585e3044 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.628439] env[69927]: DEBUG oslo_vmware.api [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1108.628439] env[69927]: value = "task-4096541" [ 1108.628439] env[69927]: _type = "Task" [ 1108.628439] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.640750] env[69927]: DEBUG oslo_vmware.api [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.667443] env[69927]: DEBUG nova.network.neutron [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Updated VIF entry in instance network info cache for port 7e7e497c-f90e-44b0-a0dc-e400b4b57c0c. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1108.667889] env[69927]: DEBUG nova.network.neutron [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Updating instance_info_cache with network_info: [{"id": "7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "address": "fa:16:3e:91:44:bb", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e497c-f9", "ovs_interfaceid": "7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.756871] env[69927]: DEBUG nova.compute.manager [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1108.838541] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096539, 'name': CreateVM_Task, 'duration_secs': 0.393213} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.838736] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.839530] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.839700] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.840084] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1108.840377] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eb647a1-e1ac-46fe-a0ec-3d85e531a374 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.848637] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1108.848637] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5263d394-2881-c760-9e7c-fd493f505ccf" [ 1108.848637] env[69927]: _type = "Task" [ 1108.848637] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.860105] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5263d394-2881-c760-9e7c-fd493f505ccf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.008419] env[69927]: DEBUG nova.network.neutron [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "address": "fa:16:3e:b3:78:b9", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e16c6cd-8c", "ovs_interfaceid": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5", "address": "fa:16:3e:70:c9:50", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a1c6e4f-54", "ovs_interfaceid": "0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.057858] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096540, 'name': ReconfigVM_Task, 'duration_secs': 0.382969} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.058205] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Reconfigured VM instance instance-0000005a to attach disk [datastore1] d9347f31-b908-4561-9b57-1ea79b762168/d9347f31-b908-4561-9b57-1ea79b762168.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.058854] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff7f6361-8649-4e3c-8b06-fbe3b3e8907b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.066248] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1109.066248] env[69927]: value = "task-4096542" [ 1109.066248] env[69927]: _type = "Task" [ 1109.066248] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.080102] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096542, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.111091] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1109.111345] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.412s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.111841] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.622s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.112043] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.114327] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.742s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.115823] env[69927]: INFO nova.compute.claims [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.141220] env[69927]: DEBUG oslo_vmware.api [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096541, 'name': PowerOffVM_Task, 'duration_secs': 0.225825} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.141497] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1109.141724] env[69927]: DEBUG nova.compute.manager [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1109.142543] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e0c326-3052-41f8-aa6f-15f5798729c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.146712] env[69927]: INFO nova.scheduler.client.report [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Deleted allocations for instance ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2 [ 1109.170390] env[69927]: DEBUG oslo_concurrency.lockutils [req-94b2f425-1b8d-4956-97ae-192df9358d47 req-dc2da961-8c26-48bb-bb15-718252e19663 service nova] Releasing lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.280569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.360977] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5263d394-2881-c760-9e7c-fd493f505ccf, 'name': SearchDatastore_Task, 'duration_secs': 0.02024} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.361335] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.361574] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1109.361817] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.361969] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.362245] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1109.362530] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4897a22-6a39-488e-b868-d4e0ecb318d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.372600] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1109.372819] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1109.374224] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ebc22cf-9274-4798-be4a-1ab64f800af8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.381660] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1109.381660] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5203fd3e-4851-b524-c4d6-81cdf9406ed9" [ 1109.381660] env[69927]: _type = "Task" [ 1109.381660] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.390713] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5203fd3e-4851-b524-c4d6-81cdf9406ed9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.512877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.513195] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.513365] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.513676] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.513911] env[69927]: DEBUG nova.network.neutron [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Refreshing network info cache for port 0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.515718] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e477d9-3746-4ef5-b975-54d66fa9f336 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.535034] env[69927]: DEBUG nova.virt.hardware [None 
req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1109.535034] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1109.535034] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1109.535034] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1109.535486] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1109.535486] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1109.535546] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1109.535689] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1109.535852] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1109.536025] env[69927]: DEBUG nova.virt.hardware [None 
req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1109.536207] env[69927]: DEBUG nova.virt.hardware [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1109.543034] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfiguring VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1109.544094] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c3020f6-f604-4a56-95ba-3fc74e45f926 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.563287] env[69927]: DEBUG oslo_vmware.api [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1109.563287] env[69927]: value = "task-4096543" [ 1109.563287] env[69927]: _type = "Task" [ 1109.563287] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.575604] env[69927]: DEBUG oslo_vmware.api [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096543, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.578657] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096542, 'name': Rename_Task, 'duration_secs': 0.159078} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.578922] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1109.579178] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-feff155e-c4fc-480c-b5d0-5ebbe707f05b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.586612] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1109.586612] env[69927]: value = "task-4096544" [ 1109.586612] env[69927]: _type = "Task" [ 1109.586612] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.599667] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096544, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.610421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "cba314de-644e-451e-8ecc-2e209d74bbce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.610421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.610421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "cba314de-644e-451e-8ecc-2e209d74bbce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.610582] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.611241] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.613133] env[69927]: INFO nova.compute.manager [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Terminating instance [ 1109.660050] env[69927]: DEBUG oslo_concurrency.lockutils [None req-959a9101-4951-4a60-8265-57f5e7d83cf5 tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.180s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.662031] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-32aaab10-9be3-497a-8a28-405ebc6f445c tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.894040] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5203fd3e-4851-b524-c4d6-81cdf9406ed9, 'name': SearchDatastore_Task, 'duration_secs': 0.024699} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.894521] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83227f23-4258-48c0-b580-4dc041979301 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.902812] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1109.902812] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b354a-bb9d-1f45-9bfb-8b0ed1134676" [ 1109.902812] env[69927]: _type = "Task" [ 1109.902812] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.911660] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b354a-bb9d-1f45-9bfb-8b0ed1134676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.077257] env[69927]: DEBUG oslo_vmware.api [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.100755] env[69927]: DEBUG oslo_vmware.api [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096544, 'name': PowerOnVM_Task, 'duration_secs': 0.512907} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.101067] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1110.101280] env[69927]: INFO nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Took 8.74 seconds to spawn the instance on the hypervisor. [ 1110.101465] env[69927]: DEBUG nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1110.102512] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6433d4c3-cfb2-48c0-807b-e4151d514052 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.121895] env[69927]: DEBUG nova.compute.manager [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1110.122162] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1110.123427] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86911db8-0a93-4a91-8022-7e151cc22cf4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.135181] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1110.135482] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-990f7df6-a3ca-40d4-b4c0-07f586b9c4b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.213539] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1110.213812] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 
cba314de-644e-451e-8ecc-2e209d74bbce] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1110.214066] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleting the datastore file [datastore2] cba314de-644e-451e-8ecc-2e209d74bbce {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1110.214348] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29d5726e-75c5-4dff-8219-1343d34364c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.223757] env[69927]: DEBUG oslo_vmware.api [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1110.223757] env[69927]: value = "task-4096546" [ 1110.223757] env[69927]: _type = "Task" [ 1110.223757] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.244930] env[69927]: DEBUG oslo_vmware.api [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.314900] env[69927]: DEBUG nova.network.neutron [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updated VIF entry in instance network info cache for port 0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1110.316566] env[69927]: DEBUG nova.network.neutron [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "address": "fa:16:3e:b3:78:b9", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e16c6cd-8c", "ovs_interfaceid": "5e16c6cd-8c81-4e94-97f0-79a4834e6c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5", "address": "fa:16:3e:70:c9:50", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a1c6e4f-54", "ovs_interfaceid": "0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.418991] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b354a-bb9d-1f45-9bfb-8b0ed1134676, 'name': SearchDatastore_Task, 'duration_secs': 0.011164} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.419652] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.419652] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 406828cc-c6aa-4686-827d-c7c8e28ffb8e/406828cc-c6aa-4686-827d-c7c8e28ffb8e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1110.419936] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10eed10c-8184-4d65-ac64-c48202d3dce8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.430524] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1110.430524] env[69927]: value = "task-4096547" [ 1110.430524] env[69927]: _type = "Task" [ 1110.430524] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.442253] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096547, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.462395] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60be019-c681-4d3d-90ae-ea912178d8a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.474648] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072dc21d-edaa-4e1c-bb68-d57d183d3c99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.517342] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e791a8-3237-42ea-b7e7-db26b5657e42 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.526453] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f92d306-f653-46b0-b1f9-c6c6e67f50a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.543434] env[69927]: DEBUG nova.compute.provider_tree [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.577769] env[69927]: DEBUG oslo_vmware.api [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096543, 'name': ReconfigVM_Task, 'duration_secs': 0.951357} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.578332] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.578620] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfigured VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1110.629902] env[69927]: INFO nova.compute.manager [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Took 35.06 seconds to build instance. [ 1110.741277] env[69927]: DEBUG oslo_vmware.api [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295187} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.741277] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1110.741277] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1110.741277] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1110.741277] env[69927]: INFO nova.compute.manager [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1110.741692] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.741692] env[69927]: DEBUG nova.compute.manager [-] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1110.741784] env[69927]: DEBUG nova.network.neutron [-] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1110.819287] env[69927]: DEBUG oslo_concurrency.lockutils [req-4f715742-3659-44a1-b492-acf2ed71ac2d req-b4a70b8b-bdb9-4f84-8277-0f839e7a1aaa service nova] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.948766] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096547, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.046593] env[69927]: DEBUG nova.scheduler.client.report [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1111.083879] env[69927]: DEBUG oslo_concurrency.lockutils [None req-12907131-57cf-4780-91dd-a5d280b85244 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.758s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.131954] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5e05c02c-cacd-4f19-85d0-533185422273 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "d9347f31-b908-4561-9b57-1ea79b762168" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.569s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.272515] env[69927]: DEBUG nova.compute.manager [req-e686b274-daf3-4cd9-9366-c70fc821198b req-e2570570-8ade-4aba-8f5d-b687da71a884 service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Received event network-vif-deleted-6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.272641] env[69927]: INFO nova.compute.manager [req-e686b274-daf3-4cd9-9366-c70fc821198b req-e2570570-8ade-4aba-8f5d-b687da71a884 service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Neutron deleted interface 6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd; detaching it from the instance and deleting it from the info cache [ 1111.272816] env[69927]: DEBUG nova.network.neutron [req-e686b274-daf3-4cd9-9366-c70fc821198b req-e2570570-8ade-4aba-8f5d-b687da71a884 service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.378070] env[69927]: DEBUG nova.compute.manager [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Stashing vm_state: stopped {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1111.444326] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096547, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624772} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.446069] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 406828cc-c6aa-4686-827d-c7c8e28ffb8e/406828cc-c6aa-4686-827d-c7c8e28ffb8e.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1111.446069] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.446069] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-456dc98d-7897-406d-9e62-d06f911f3d62 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.455592] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1111.455592] env[69927]: value = "task-4096548" [ 1111.455592] env[69927]: _type = "Task" [ 1111.455592] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.468034] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096548, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.545023] env[69927]: DEBUG nova.network.neutron [-] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.552963] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.553534] env[69927]: DEBUG nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1111.558428] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.471s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.559305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.561668] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.595s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.561668] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.564836] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.456s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.565141] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.567986] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.543s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.569833] env[69927]: INFO nova.compute.claims [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1111.603789] env[69927]: INFO nova.scheduler.client.report [None req-c75d50ae-6288-4768-9054-321d58814227 
tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleted allocations for instance 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5 [ 1111.610859] env[69927]: INFO nova.scheduler.client.report [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted allocations for instance 21b7b237-557e-4030-93bb-6b5ce417e53c [ 1111.620078] env[69927]: INFO nova.scheduler.client.report [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted allocations for instance e1b3d0bc-a251-4dbd-89a6-216a2f2c1313 [ 1111.776194] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-352c7066-92d6-4be1-bf70-4de0259ba64a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.786845] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193cdd2d-7f2f-4d28-b748-5332a6952436 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.822040] env[69927]: DEBUG nova.compute.manager [req-e686b274-daf3-4cd9-9366-c70fc821198b req-e2570570-8ade-4aba-8f5d-b687da71a884 service nova] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Detach interface failed, port_id=6be5b0c0-f02c-4af2-9ca7-f5792eb7bfbd, reason: Instance cba314de-644e-451e-8ecc-2e209d74bbce could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1111.898413] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.966772] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.247494} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.967127] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1111.968155] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e33dab8-244d-4b58-aeab-685500695f88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.999295] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 406828cc-c6aa-4686-827d-c7c8e28ffb8e/406828cc-c6aa-4686-827d-c7c8e28ffb8e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1111.999295] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4d88557-7412-48ac-a3bf-2c3f27b77401 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.024046] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1112.024046] env[69927]: value = "task-4096549" [ 1112.024046] env[69927]: _type = "Task" [ 1112.024046] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.033387] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096549, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.051695] env[69927]: INFO nova.compute.manager [-] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Took 1.31 seconds to deallocate network for instance. [ 1112.076898] env[69927]: DEBUG nova.compute.utils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1112.082816] env[69927]: DEBUG nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1112.083679] env[69927]: DEBUG nova.network.neutron [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1112.121190] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c75d50ae-6288-4768-9054-321d58814227 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.509s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.122978] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ede1c2-5cb6-42f0-a44f-a17e786fe6b6 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "21b7b237-557e-4030-93bb-6b5ce417e53c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.824s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.133080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-10281646-013b-4fb1-8616-d8bc61fe71d1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "e1b3d0bc-a251-4dbd-89a6-216a2f2c1313" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.116s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.159156] env[69927]: DEBUG nova.policy [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76414b2ae1aa4ab582c2b59fd4218005', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544f2a021144492ba1aea46ce6075e53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1112.536700] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096549, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.561735] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.584329] env[69927]: DEBUG nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1112.653252] env[69927]: DEBUG nova.network.neutron [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Successfully created port: b15acf58-fac9-4e30-aaf1-9b212850bf3e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1112.686388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.686388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.910426] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f28c0d4-6a73-4769-9768-d6ba35c81170 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.914937] env[69927]: DEBUG nova.compute.manager [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1112.915635] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afa6528-4b14-4e62-a674-04a40f39b555 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.925086] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190f2e5a-f30c-48f1-9582-912b0644f34d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.964209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ff23ee-231c-47b9-ae2f-36a42a11d828 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.975376] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9779e8c2-b122-4498-8186-5c2be6afb849 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.996990] env[69927]: DEBUG nova.compute.provider_tree [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.036164] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096549, 'name': ReconfigVM_Task, 'duration_secs': 0.691636} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.036454] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 406828cc-c6aa-4686-827d-c7c8e28ffb8e/406828cc-c6aa-4686-827d-c7c8e28ffb8e.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1113.036783] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=69927) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1113.037890] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-8c213d92-694b-4591-9901-b35cccef2c8f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.043551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "256319c4-817d-4267-8531-a65f0f8cd0b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.043790] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.044031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock 
"256319c4-817d-4267-8531-a65f0f8cd0b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.044240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.044240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.047194] env[69927]: INFO nova.compute.manager [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Terminating instance [ 1113.050271] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1113.050271] env[69927]: value = "task-4096550" [ 1113.050271] env[69927]: _type = "Task" [ 1113.050271] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.061629] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096550, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.191230] env[69927]: INFO nova.compute.manager [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Detaching volume d0e7e15a-d1ef-48e6-8980-78f809252ca0 [ 1113.238166] env[69927]: INFO nova.virt.block_device [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Attempting to driver detach volume d0e7e15a-d1ef-48e6-8980-78f809252ca0 from mountpoint /dev/sdb [ 1113.238166] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1113.238166] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811505', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'name': 'volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a2b1684f-82af-42fc-925e-db36f31cfe63', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'serial': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1113.238620] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f3f34c-f9ce-40cf-80e5-e6d19e2328fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.270663] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fedeb5-58b8-4aa6-8c99-12cedd7fa784 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.285177] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cefd3b-8d90-46bc-8f18-245638d10fb6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.309578] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2619bd9a-fe4b-415c-ae69-b979669f09b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.328411] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] The volume has not been displaced from its original location: [datastore1] volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0/volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1113.334540] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfiguring VM instance instance-00000032 to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1113.334712] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7da795df-6e6a-46ab-856b-bedf8d3f00c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.356304] env[69927]: DEBUG oslo_vmware.api [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1113.356304] env[69927]: value = "task-4096551" [ 1113.356304] env[69927]: _type = "Task" [ 1113.356304] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.366791] env[69927]: DEBUG oslo_vmware.api [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.436233] env[69927]: INFO nova.compute.manager [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] instance snapshotting [ 1113.441460] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f9b8b1-9825-4e3e-84d3-5b860218692b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.462074] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98eec382-db2e-4641-b396-0d8865e376f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.497051] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.497256] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.501884] env[69927]: DEBUG nova.scheduler.client.report [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 
tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1113.558751] env[69927]: DEBUG nova.compute.manager [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1113.558751] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1113.558751] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e932f06-b295-40da-8ce5-cd8977ea3e15 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.571511] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096550, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.056009} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.574100] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=69927) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1113.574604] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1113.576577] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87564255-7d9c-42d3-992e-6ec8b6b5f65b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.578560] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a9833fb-e026-4899-8735-15146e1cb3e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.602404] env[69927]: DEBUG nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1113.612895] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 406828cc-c6aa-4686-827d-c7c8e28ffb8e/ephemeral_0.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.615722] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4a5b150-f3e8-4ebe-a30e-cd1ec8d07595 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.629544] env[69927]: DEBUG oslo_vmware.api [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1113.629544] env[69927]: value = "task-4096552" [ 1113.629544] env[69927]: _type = "Task" [ 1113.629544] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.639837] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1113.639837] env[69927]: value = "task-4096553" [ 1113.639837] env[69927]: _type = "Task" [ 1113.639837] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.643330] env[69927]: DEBUG oslo_vmware.api [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.646378] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.646667] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.647018] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.647252] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.647410] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.647586] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.647930] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.648201] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 
tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.648445] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.648650] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.648862] env[69927]: DEBUG nova.virt.hardware [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.653720] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f560c209-e76a-4311-bdb4-1d3e0afb0e60 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.663105] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096553, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.666751] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37cecf8-51f6-431a-ab02-8b125e072ca4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.867405] env[69927]: DEBUG oslo_vmware.api [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096551, 'name': ReconfigVM_Task, 'duration_secs': 0.350845} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.867679] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Reconfigured VM instance instance-00000032 to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1113.873065] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-132a3d4c-b2f6-4fd1-ace0-69a3b5a4bc4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.891167] env[69927]: DEBUG oslo_vmware.api [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1113.891167] env[69927]: value = "task-4096554" [ 1113.891167] env[69927]: _type = "Task" [ 1113.891167] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.903453] env[69927]: DEBUG oslo_vmware.api [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096554, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.974026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1113.975756] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9721033f-dc4d-4dd9-9c73-c17656791fe9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.983928] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1113.983928] env[69927]: value = "task-4096555" [ 1113.983928] env[69927]: _type = "Task" [ 1113.983928] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.996113] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096555, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.008035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.008035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.008617] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.009183] env[69927]: DEBUG nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1114.016217] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6bb735-25bb-463c-9ec3-5c1d820f66de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.016453] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.753s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.016453] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.018742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.504s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.018971] env[69927]: DEBUG nova.objects.instance [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lazy-loading 'resources' on Instance uuid 2ae5fcf7-3111-4e80-80b0-f9c1cece1001 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1114.048172] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131ee3b6-d44f-4501-86d7-252aeae14a12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.091459] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfiguring VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1114.093509] env[69927]: INFO nova.scheduler.client.report [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Deleted allocations for instance 0c8e43a3-3f33-4a41-81d3-a98565dca4a7 [ 1114.094490] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa05eb0d-5e75-4bb2-8b5d-a575cc889ee3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.125218] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1114.125218] env[69927]: value = "task-4096556" [ 1114.125218] env[69927]: _type = "Task" [ 1114.125218] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.136346] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.146200] env[69927]: DEBUG oslo_vmware.api [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096552, 'name': PowerOffVM_Task, 'duration_secs': 0.275527} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.150583] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.152379] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.152379] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9f60cad-b694-4f48-a5fe-aebf45e7bf39 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.161279] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096553, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.235462] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.235730] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.236048] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleting the datastore file [datastore1] 256319c4-817d-4267-8531-a65f0f8cd0b6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.236555] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-933ed375-6c9a-45b5-ab20-7275e2b90434 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.250901] env[69927]: DEBUG oslo_vmware.api [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for the task: (returnval){ [ 1114.250901] env[69927]: value = "task-4096558" [ 1114.250901] env[69927]: _type = "Task" [ 1114.250901] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.265383] env[69927]: DEBUG oslo_vmware.api [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096558, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.400631] env[69927]: DEBUG oslo_vmware.api [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096554, 'name': ReconfigVM_Task, 'duration_secs': 0.343229} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.400631] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811505', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'name': 'volume-d0e7e15a-d1ef-48e6-8980-78f809252ca0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'a2b1684f-82af-42fc-925e-db36f31cfe63', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0', 'serial': 'd0e7e15a-d1ef-48e6-8980-78f809252ca0'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1114.497250] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096555, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.524174] env[69927]: DEBUG nova.compute.utils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1114.532967] env[69927]: DEBUG nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1114.532967] env[69927]: DEBUG nova.network.neutron [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1114.573034] env[69927]: DEBUG nova.network.neutron [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Successfully updated port: b15acf58-fac9-4e30-aaf1-9b212850bf3e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.624323] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7ecbb82a-ca7d-49e5-bbf8-e56d60f57968 tempest-AttachVolumeTestJSON-218421131 tempest-AttachVolumeTestJSON-218421131-project-member] Lock "0c8e43a3-3f33-4a41-81d3-a98565dca4a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.675s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.638274] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.662102] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096553, 'name': ReconfigVM_Task, 'duration_secs': 0.595016} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.662915] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 406828cc-c6aa-4686-827d-c7c8e28ffb8e/ephemeral_0.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.665361] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0457d75-559d-4ca0-a3a1-c95f55f8e2a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.669994] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.670487] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.679921] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1114.679921] env[69927]: value = "task-4096559" [ 1114.679921] env[69927]: _type = "Task" [ 1114.679921] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.695666] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096559, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.768116] env[69927]: DEBUG oslo_vmware.api [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Task: {'id': task-4096558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35477} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.768624] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1114.769066] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1114.769391] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1114.769696] env[69927]: INFO nova.compute.manager [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1114.770125] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1114.774746] env[69927]: DEBUG nova.compute.manager [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1114.774910] env[69927]: DEBUG nova.network.neutron [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1114.833589] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647f0bd5-0734-4d35-afc5-762e59b26172 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.838874] env[69927]: DEBUG nova.policy [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b76afa4e15844b0688a2cb888841035d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86f00841a7dc4ae9b184f19e5f847095', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1114.846969] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2871318a-3255-487e-bd2e-03e40ee51620 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.886080] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39a4b98-5e6a-4965-a9f0-0ed45608a27a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.894959] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2717fa-0a97-4397-b8f7-ed2bdcf5cb1c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.913144] env[69927]: DEBUG nova.compute.provider_tree [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.960297] env[69927]: DEBUG nova.objects.instance [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'flavor' on Instance uuid a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.998510] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096555, 'name': CreateSnapshot_Task, 'duration_secs': 0.835736} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.999117] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1115.000136] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3bcc8b-1d9d-45d5-92f6-caf5893809e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.033839] env[69927]: DEBUG nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1115.078947] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "refresh_cache-b422d5c9-f580-4d07-9d13-af307571bf48" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.081514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "refresh_cache-b422d5c9-f580-4d07-9d13-af307571bf48" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.081514] env[69927]: DEBUG nova.network.neutron [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.146697] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.173599] env[69927]: DEBUG nova.compute.manager [req-17ced68d-5d12-43aa-b0d6-c4570e958573 req-fade73ed-4da2-4096-9f76-795898e170d4 service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Received event network-vif-plugged-b15acf58-fac9-4e30-aaf1-9b212850bf3e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.174203] env[69927]: DEBUG oslo_concurrency.lockutils [req-17ced68d-5d12-43aa-b0d6-c4570e958573 req-fade73ed-4da2-4096-9f76-795898e170d4 service nova] Acquiring lock "b422d5c9-f580-4d07-9d13-af307571bf48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.174203] env[69927]: DEBUG oslo_concurrency.lockutils [req-17ced68d-5d12-43aa-b0d6-c4570e958573 req-fade73ed-4da2-4096-9f76-795898e170d4 service nova] Lock "b422d5c9-f580-4d07-9d13-af307571bf48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.174203] env[69927]: DEBUG oslo_concurrency.lockutils [req-17ced68d-5d12-43aa-b0d6-c4570e958573 req-fade73ed-4da2-4096-9f76-795898e170d4 service nova] Lock "b422d5c9-f580-4d07-9d13-af307571bf48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.174373] env[69927]: DEBUG nova.compute.manager [req-17ced68d-5d12-43aa-b0d6-c4570e958573 req-fade73ed-4da2-4096-9f76-795898e170d4 service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] No waiting events found dispatching network-vif-plugged-b15acf58-fac9-4e30-aaf1-9b212850bf3e {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1115.174517] env[69927]: WARNING nova.compute.manager [req-17ced68d-5d12-43aa-b0d6-c4570e958573 req-fade73ed-4da2-4096-9f76-795898e170d4 service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Received unexpected event network-vif-plugged-b15acf58-fac9-4e30-aaf1-9b212850bf3e for instance with vm_state building and task_state spawning. [ 1115.178324] env[69927]: DEBUG nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1115.198064] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096559, 'name': Rename_Task, 'duration_secs': 0.406001} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.199828] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.201863] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc98f1ee-fd86-4e8c-9b19-735cd61f1093 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.212104] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1115.212104] env[69927]: value = "task-4096560" [ 1115.212104] env[69927]: _type = "Task" [ 1115.212104] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.223094] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096560, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.419333] env[69927]: DEBUG nova.scheduler.client.report [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.524453] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1115.525182] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9f337c7a-7e77-477a-b33b-b3a898a727ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.535669] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1115.535669] env[69927]: value = "task-4096561" [ 1115.535669] env[69927]: _type = "Task" [ 1115.535669] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.542837] env[69927]: INFO nova.virt.block_device [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Booting with volume 110b2a96-6541-4296-9d43-a5c1d0562ba9 at /dev/sda [ 1115.548853] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096561, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.592833] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3c501a2-4e1d-4c8b-a3a4-866ccf794b4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.604771] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6275ee27-b85c-4689-9c06-ec6bee71ae73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.618643] env[69927]: DEBUG nova.network.neutron [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Successfully created port: 358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1115.622125] env[69927]: DEBUG nova.network.neutron [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1115.653805] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b125612-453e-4e8f-b5d1-ba3bf54469aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.655996] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.666798] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b982149-a61a-472f-a05c-fab36b4755ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.690779] env[69927]: DEBUG nova.network.neutron [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.697846] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.698121] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.730610] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.734876] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e50477-0bad-4f2c-b14c-75a0c59a153c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.750650] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f8206e-5ba2-482f-9a15-9c026eec5a71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.753599] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096560, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.767313] env[69927]: DEBUG nova.virt.block_device [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Updating existing volume attachment record: 491ef702-7927-45b7-9214-0f0acb787cb3 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1115.785108] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "cff307ed-3c8b-4126-9749-1204597cbf6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.785108] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.824137] env[69927]: DEBUG nova.network.neutron [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Updating instance_info_cache with network_info: [{"id": "b15acf58-fac9-4e30-aaf1-9b212850bf3e", "address": "fa:16:3e:95:69:23", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15acf58-fa", "ovs_interfaceid": "b15acf58-fac9-4e30-aaf1-9b212850bf3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.927385] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.930740] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.650s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.954918] env[69927]: INFO nova.scheduler.client.report [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Deleted allocations for instance 2ae5fcf7-3111-4e80-80b0-f9c1cece1001 [ 1115.970757] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0e806360-d386-420a-b5f9-0d015a6af8c6 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.285s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.047679] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096561, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.138700] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.199378] env[69927]: INFO nova.compute.manager [-] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Took 1.42 seconds to deallocate network for instance. [ 1116.202197] env[69927]: DEBUG nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1116.244181] env[69927]: DEBUG oslo_vmware.api [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096560, 'name': PowerOnVM_Task, 'duration_secs': 0.952985} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.244500] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1116.244790] env[69927]: INFO nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Took 9.94 seconds to spawn the instance on the hypervisor. 
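[Illustrative sketch, not part of the captured log.] The repeated "Invoking VirtualMachine.<X>_Task", "progress is N%", and "completed successfully" records above all come from the same oslo.vmware calling pattern: a vCenter task is started with invoke_api() and then polled by wait_for_task(). The snippet below is a minimal, self-contained approximation of that pattern; the vCenter endpoint, credentials, and VM name are placeholders, and the object lookup is a simplified stand-in for Nova's own reference handling.

```python
# Hedged sketch of the task start + poll pattern visible in the log above.
# Endpoint, credentials and the VM name are assumptions, not values from the log.
from oslo_vmware import api, vim_util


def power_off_by_name(vm_name):
    # Placeholder endpoint/credentials; in Nova these come from nova.conf.
    session = api.VMwareAPISession(
        'vcenter.example.test', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Retrieve VirtualMachine objects with their "name" property and pick the
    # requested one (a simplified stand-in for Nova's managed-object lookup).
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'VirtualMachine', 100, ['name'])
    vm_ref = None
    for obj in retrieve_result.objects:
        if obj.propSet and obj.propSet[0].val == vm_name:
            vm_ref = obj.obj
            break
    if vm_ref is None:
        raise RuntimeError('VM %s not found' % vm_name)

    # Start the vCenter task; this is what produces the
    # "Invoking VirtualMachine.PowerOffVM_Task" DEBUG records.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # Block until the task reaches a terminal state; while polling,
    # oslo.vmware emits the periodic "progress is N%" messages and raises
    # if the task ends in error.
    session.wait_for_task(task)


if __name__ == '__main__':
    power_off_by_name('my-test-vm')
```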
[ 1116.245070] env[69927]: DEBUG nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1116.246237] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2e2b21-4dd8-4238-8bd8-b128e4ae4e50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.285536] env[69927]: DEBUG nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1116.326875] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "refresh_cache-b422d5c9-f580-4d07-9d13-af307571bf48" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.330456] env[69927]: DEBUG nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Instance network_info: |[{"id": "b15acf58-fac9-4e30-aaf1-9b212850bf3e", "address": "fa:16:3e:95:69:23", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15acf58-fa", "ovs_interfaceid": "b15acf58-fac9-4e30-aaf1-9b212850bf3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1116.331442] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:69:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b15acf58-fac9-4e30-aaf1-9b212850bf3e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1116.344074] 
env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1116.347144] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1116.347693] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93c8f887-e3f5-4ce7-b1c2-b831583d53de {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.370167] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1116.370167] env[69927]: value = "task-4096562" [ 1116.370167] env[69927]: _type = "Task" [ 1116.370167] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.380347] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096562, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.437592] env[69927]: INFO nova.compute.claims [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1116.465643] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9c14e35c-b087-4ef3-aa4a-73e06b3f0b12 tempest-ServerTagsTestJSON-1641259867 tempest-ServerTagsTestJSON-1641259867-project-member] Lock "2ae5fcf7-3111-4e80-80b0-f9c1cece1001" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.541s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.547191] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096561, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.638025] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.696367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.696367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.696367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.696367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.696367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.697771] env[69927]: INFO nova.compute.manager [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Terminating instance [ 1116.718855] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.737046] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1116.772116] env[69927]: INFO nova.compute.manager [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Took 33.68 seconds to build instance. [ 1116.817107] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.880810] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096562, 'name': CreateVM_Task, 'duration_secs': 0.35613} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.881645] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1116.882415] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.882589] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.882977] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1116.883525] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98426b1f-1771-48d9-a22e-26f72e11e41e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.889330] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1116.889330] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52914d73-e973-e720-73a3-a2604e6c73c5" [ 1116.889330] env[69927]: _type = "Task" [ 1116.889330] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.900626] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52914d73-e973-e720-73a3-a2604e6c73c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.948825] env[69927]: INFO nova.compute.resource_tracker [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating resource usage from migration c94ab390-d71f-484a-9d29-c3ecdb47d6c8 [ 1117.057176] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096561, 'name': CloneVM_Task, 'duration_secs': 1.318526} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.060439] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Created linked-clone VM from snapshot [ 1117.062076] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef192f0-1d0c-42f1-ae20-045ae55f62fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.071494] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Uploading image bcb208f6-8479-4c8e-99f0-b31903f672fe {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1117.095023] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1117.095023] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8409f90e-8b20-4837-915f-b28103f169c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.100087] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1117.100087] env[69927]: value = "task-4096564" [ 1117.100087] env[69927]: _type = "Task" [ 1117.100087] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.113176] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096564, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.142796] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.203564] env[69927]: DEBUG nova.compute.manager [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1117.203790] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1117.209019] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2581fc91-3bac-40fa-8cff-20cda7965c73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.215628] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1117.220288] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e07ce2fd-c37d-4338-85a9-4c1efc680a4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.224024] env[69927]: DEBUG nova.compute.manager [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Received event network-changed-b15acf58-fac9-4e30-aaf1-9b212850bf3e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.224290] env[69927]: DEBUG nova.compute.manager [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Refreshing instance network info cache due to event network-changed-b15acf58-fac9-4e30-aaf1-9b212850bf3e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1117.224525] env[69927]: DEBUG oslo_concurrency.lockutils [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] Acquiring lock "refresh_cache-b422d5c9-f580-4d07-9d13-af307571bf48" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.224671] env[69927]: DEBUG oslo_concurrency.lockutils [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] Acquired lock "refresh_cache-b422d5c9-f580-4d07-9d13-af307571bf48" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.224832] env[69927]: DEBUG nova.network.neutron [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Refreshing network info cache for port b15acf58-fac9-4e30-aaf1-9b212850bf3e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.234323] env[69927]: DEBUG oslo_vmware.api [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1117.234323] env[69927]: value = "task-4096565" [ 1117.234323] env[69927]: _type = "Task" [ 1117.234323] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.252815] env[69927]: DEBUG oslo_vmware.api [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096565, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.275572] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf63286-2f3f-45c4-be93-f068b21b0f2d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.280023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-aa4b002f-7460-4071-ad3b-1c99c814c89c tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.207s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.286945] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f370e020-1b8e-4295-97b1-aaa40c2dc7ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.336759] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecba463-0fdf-4521-abdb-f416112eda35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.347147] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3fee6e-3a04-47f2-97fd-499db912169d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.376445] env[69927]: DEBUG nova.compute.provider_tree [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.402372] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52914d73-e973-e720-73a3-a2604e6c73c5, 'name': SearchDatastore_Task, 'duration_secs': 0.010354} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.402732] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.403050] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1117.403309] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.403468] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.403863] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1117.404211] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ce402b3-4f6a-451e-9a14-fcd11716625e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.415400] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1117.415598] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1117.416444] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f6d6609-12ea-4b67-8726-35c54055a4c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.423325] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1117.423325] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3b5bf-2bde-d2bb-5f87-d5c11639acc0" [ 1117.423325] env[69927]: _type = "Task" [ 1117.423325] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.434984] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3b5bf-2bde-d2bb-5f87-d5c11639acc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.611606] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096564, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.641530] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.745424] env[69927]: DEBUG oslo_vmware.api [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096565, 'name': PowerOffVM_Task, 'duration_secs': 0.253499} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.745725] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1117.746174] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1117.746174] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5dd1416-52e0-4c4f-891e-b95e2afe94f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.818323] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1117.818559] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1117.818745] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleting the datastore file [datastore1] a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1117.819074] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f250dcb5-8683-44f0-8f42-42450e5a72f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.826099] env[69927]: DEBUG oslo_vmware.api [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1117.826099] env[69927]: value = "task-4096567" [ 1117.826099] env[69927]: _type = "Task" [ 1117.826099] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.835608] env[69927]: DEBUG oslo_vmware.api [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096567, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.879929] env[69927]: DEBUG nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1117.880505] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1117.880718] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1117.880879] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.881172] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1117.882909] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.882909] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1117.882909] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1117.882909] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 
tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1117.882909] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1117.882909] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1117.882909] env[69927]: DEBUG nova.virt.hardware [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1117.883546] env[69927]: DEBUG nova.scheduler.client.report [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.887794] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5cea99-b4ff-4414-9c1b-a4c5e3bf8eb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.897980] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6374d3-2949-427d-b1f0-57a86168cec4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.937580] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f3b5bf-2bde-d2bb-5f87-d5c11639acc0, 'name': SearchDatastore_Task, 'duration_secs': 0.011946} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.938497] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86751c5f-3320-4549-89b8-15ed5d146fb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.945793] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1117.945793] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba3214-c2a4-2b42-df41-650bf880c28a" [ 1117.945793] env[69927]: _type = "Task" [ 1117.945793] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.955251] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba3214-c2a4-2b42-df41-650bf880c28a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.995022] env[69927]: DEBUG nova.compute.manager [req-aac99cca-f2fd-48b0-81db-24f797a0a62a req-26c39ffe-9a22-49d2-82f9-bfc382154eea service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Received event network-vif-plugged-358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.995022] env[69927]: DEBUG oslo_concurrency.lockutils [req-aac99cca-f2fd-48b0-81db-24f797a0a62a req-26c39ffe-9a22-49d2-82f9-bfc382154eea service nova] Acquiring lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.995022] env[69927]: DEBUG oslo_concurrency.lockutils [req-aac99cca-f2fd-48b0-81db-24f797a0a62a req-26c39ffe-9a22-49d2-82f9-bfc382154eea service nova] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.995022] env[69927]: DEBUG oslo_concurrency.lockutils [req-aac99cca-f2fd-48b0-81db-24f797a0a62a req-26c39ffe-9a22-49d2-82f9-bfc382154eea service nova] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.995022] env[69927]: DEBUG nova.compute.manager [req-aac99cca-f2fd-48b0-81db-24f797a0a62a req-26c39ffe-9a22-49d2-82f9-bfc382154eea service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] No waiting events found dispatching network-vif-plugged-358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1117.995022] env[69927]: WARNING nova.compute.manager [req-aac99cca-f2fd-48b0-81db-24f797a0a62a req-26c39ffe-9a22-49d2-82f9-bfc382154eea service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Received unexpected event network-vif-plugged-358ca610-8c4b-4e27-99df-97c37c69074f 
for instance with vm_state building and task_state spawning. [ 1118.047222] env[69927]: DEBUG nova.network.neutron [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Updated VIF entry in instance network info cache for port b15acf58-fac9-4e30-aaf1-9b212850bf3e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.047583] env[69927]: DEBUG nova.network.neutron [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Updating instance_info_cache with network_info: [{"id": "b15acf58-fac9-4e30-aaf1-9b212850bf3e", "address": "fa:16:3e:95:69:23", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15acf58-fa", "ovs_interfaceid": "b15acf58-fac9-4e30-aaf1-9b212850bf3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.113897] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096564, 'name': Destroy_Task, 'duration_secs': 0.748809} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.114275] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Destroyed the VM [ 1118.114578] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1118.114997] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6a94e4af-9d90-488d-b2db-293fb26aac80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.122608] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1118.122608] env[69927]: value = "task-4096568" [ 1118.122608] env[69927]: _type = "Task" [ 1118.122608] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.136610] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096568, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.143090] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.156824] env[69927]: DEBUG nova.network.neutron [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Successfully updated port: 358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1118.338830] env[69927]: DEBUG oslo_vmware.api [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157139} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.339313] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.339580] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1118.339856] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1118.340120] env[69927]: INFO nova.compute.manager [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1118.340999] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1118.340999] env[69927]: DEBUG nova.compute.manager [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1118.340999] env[69927]: DEBUG nova.network.neutron [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1118.393527] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.463s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.393739] env[69927]: INFO nova.compute.manager [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Migrating [ 1118.402494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.504s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.460471] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba3214-c2a4-2b42-df41-650bf880c28a, 'name': SearchDatastore_Task, 'duration_secs': 0.011017} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.460707] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.460967] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b422d5c9-f580-4d07-9d13-af307571bf48/b422d5c9-f580-4d07-9d13-af307571bf48.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1118.461264] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6eede3db-bc1c-4e1c-96e0-4b9c1c847101 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.469753] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1118.469753] env[69927]: value = "task-4096569" [ 1118.469753] env[69927]: _type = "Task" [ 1118.469753] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.479296] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.551182] env[69927]: DEBUG oslo_concurrency.lockutils [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] Releasing lock "refresh_cache-b422d5c9-f580-4d07-9d13-af307571bf48" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.551547] env[69927]: DEBUG nova.compute.manager [req-1a5dc926-4a20-48e5-95ac-ee7c657f9bd4 req-d7b9b01a-75fe-47fb-b139-9f98ae902bea service nova] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Received event network-vif-deleted-85825835-357a-42a3-81f4-b55d7e165b65 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1118.638777] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096568, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.652048] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.660260] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquiring lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.660438] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquired lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.660609] env[69927]: DEBUG nova.network.neutron [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1118.910724] env[69927]: INFO nova.compute.claims [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.915383] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.915383] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.915383] env[69927]: DEBUG nova.network.neutron [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1118.982472] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096569, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.146191] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.149637] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096568, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.195056] env[69927]: DEBUG nova.network.neutron [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1119.342750] env[69927]: DEBUG nova.network.neutron [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Updating instance_info_cache with network_info: [{"id": "358ca610-8c4b-4e27-99df-97c37c69074f", "address": "fa:16:3e:b4:c9:1a", "network": {"id": "ef0015d9-87ee-49aa-8291-f5ca7af139c2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1050387659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f00841a7dc4ae9b184f19e5f847095", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358ca610-8c", "ovs_interfaceid": "358ca610-8c4b-4e27-99df-97c37c69074f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.423056] env[69927]: INFO nova.compute.resource_tracker [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating resource usage from migration c390783a-4fa8-4820-8f62-f91ac3968718 [ 1119.482912] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549185} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.486054] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] b422d5c9-f580-4d07-9d13-af307571bf48/b422d5c9-f580-4d07-9d13-af307571bf48.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1119.486314] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1119.486615] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3819f72b-7f6c-4b4a-b152-7b8526bc127a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.500743] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1119.500743] env[69927]: value = "task-4096570" [ 1119.500743] env[69927]: _type = "Task" [ 1119.500743] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.516388] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096570, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.650844] env[69927]: DEBUG oslo_vmware.api [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096568, 'name': RemoveSnapshot_Task, 'duration_secs': 1.104101} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.654032] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1119.661993] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.704509] env[69927]: DEBUG nova.network.neutron [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance_info_cache with network_info: [{"id": "216a398c-956e-4115-ae6d-b045d946831b", "address": "fa:16:3e:84:a5:05", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216a398c-95", "ovs_interfaceid": "216a398c-956e-4115-ae6d-b045d946831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.773990] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8666a8-f458-45bb-b21c-59017175ebc0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.785670] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d18434-badb-4548-9c97-5d7f78b2c083 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.831145] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642d1f4f-b9c4-4481-aa2e-42c561aa87f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.840577] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891d50ad-cd29-47f3-9849-0e9d9b101607 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.846825] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Releasing lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.847290] env[69927]: DEBUG nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance network_info: |[{"id": "358ca610-8c4b-4e27-99df-97c37c69074f", "address": "fa:16:3e:b4:c9:1a", "network": {"id": 
"ef0015d9-87ee-49aa-8291-f5ca7af139c2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1050387659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f00841a7dc4ae9b184f19e5f847095", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358ca610-8c", "ovs_interfaceid": "358ca610-8c4b-4e27-99df-97c37c69074f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1119.847745] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:c9:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '358ca610-8c4b-4e27-99df-97c37c69074f', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1119.855461] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Creating folder: Project (86f00841a7dc4ae9b184f19e5f847095). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1119.856793] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b573bf3-1bee-4bbe-9ef4-0b82a55eec47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.867254] env[69927]: DEBUG nova.compute.provider_tree [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.884034] env[69927]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1119.884366] env[69927]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69927) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1119.884652] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Folder already exists: Project (86f00841a7dc4ae9b184f19e5f847095). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1119.884969] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Creating folder: Instances. Parent ref: group-v811532. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1119.885781] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ee0deb6-1652-4415-8673-acf54f049681 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.900026] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Created folder: Instances in parent group-v811532. [ 1119.900352] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1119.900570] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1119.900800] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b35d314-ddcd-4824-8bf3-6cfe7372c9f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.924108] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1119.924108] env[69927]: value = "task-4096574" [ 1119.924108] env[69927]: _type = "Task" [ 1119.924108] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.934057] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096574, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.014107] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.265505} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.014967] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1120.015955] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b57b7f5-31db-4cde-bf7f-187a2b59bba9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.043023] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] b422d5c9-f580-4d07-9d13-af307571bf48/b422d5c9-f580-4d07-9d13-af307571bf48.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.044625] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a081438f-0821-4916-8f99-36718d879da3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.061440] env[69927]: DEBUG nova.compute.manager [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Received event network-changed-358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1120.061640] env[69927]: DEBUG nova.compute.manager [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Refreshing instance network info cache due to event network-changed-358ca610-8c4b-4e27-99df-97c37c69074f. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1120.061882] env[69927]: DEBUG oslo_concurrency.lockutils [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] Acquiring lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.062123] env[69927]: DEBUG oslo_concurrency.lockutils [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] Acquired lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.062315] env[69927]: DEBUG nova.network.neutron [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Refreshing network info cache for port 358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1120.070557] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1120.070557] env[69927]: value = "task-4096575" [ 1120.070557] env[69927]: _type = "Task" [ 1120.070557] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.080455] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096575, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.116996] env[69927]: DEBUG nova.network.neutron [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.143177] env[69927]: DEBUG oslo_vmware.api [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096556, 'name': ReconfigVM_Task, 'duration_secs': 5.919327} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.143455] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.143669] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Reconfigured VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1120.159125] env[69927]: WARNING nova.compute.manager [None req-02a9f2c6-89ed-4c7a-aaac-824b80a09b71 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Image not found during snapshot: nova.exception.ImageNotFound: Image bcb208f6-8479-4c8e-99f0-b31903f672fe could not be found. [ 1120.213357] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.376670] env[69927]: DEBUG nova.scheduler.client.report [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.437492] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096574, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.540609] env[69927]: DEBUG nova.compute.manager [req-ad0f50a6-39a9-4562-a5b5-f4eadc1b5f22 req-067b3a84-e4bd-46a0-9d23-085241e971ac service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-vif-deleted-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1120.540609] env[69927]: INFO nova.compute.manager [req-ad0f50a6-39a9-4562-a5b5-f4eadc1b5f22 req-067b3a84-e4bd-46a0-9d23-085241e971ac service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Neutron deleted interface 5e16c6cd-8c81-4e94-97f0-79a4834e6c6f; detaching it from the instance and deleting it from the info cache [ 1120.540781] env[69927]: DEBUG nova.network.neutron [req-ad0f50a6-39a9-4562-a5b5-f4eadc1b5f22 req-067b3a84-e4bd-46a0-9d23-085241e971ac service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5", "address": "fa:16:3e:70:c9:50", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a1c6e4f-54", "ovs_interfaceid": "0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1120.583612] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.619504] env[69927]: INFO nova.compute.manager [-] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Took 2.28 seconds to deallocate network for instance. [ 1120.774541] env[69927]: DEBUG nova.network.neutron [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Updated VIF entry in instance network info cache for port 358ca610-8c4b-4e27-99df-97c37c69074f. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1120.774541] env[69927]: DEBUG nova.network.neutron [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Updating instance_info_cache with network_info: [{"id": "358ca610-8c4b-4e27-99df-97c37c69074f", "address": "fa:16:3e:b4:c9:1a", "network": {"id": "ef0015d9-87ee-49aa-8291-f5ca7af139c2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1050387659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f00841a7dc4ae9b184f19e5f847095", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358ca610-8c", "ovs_interfaceid": "358ca610-8c4b-4e27-99df-97c37c69074f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.888231] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.486s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.888539] env[69927]: INFO nova.compute.manager [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Migrating [ 1120.895343] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.334s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.895601] env[69927]: DEBUG nova.objects.instance [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'resources' on Instance uuid cba314de-644e-451e-8ecc-2e209d74bbce {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.937098] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096574, 'name': CreateVM_Task, 'duration_secs': 0.67232} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.937627] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1120.938450] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '491ef702-7927-45b7-9214-0f0acb787cb3', 'delete_on_termination': True, 'device_type': None, 'boot_index': 0, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811540', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'name': 'volume-110b2a96-6541-4296-9d43-a5c1d0562ba9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3', 'attached_at': '', 'detached_at': '', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'serial': '110b2a96-6541-4296-9d43-a5c1d0562ba9'}, 'mount_device': '/dev/sda', 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69927) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1120.938680] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Root volume attach. 
Driver type: vmdk {{(pid=69927) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1120.939772] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1877d232-303b-42b0-b9d9-6191ac1f6de8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.948371] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe0639b-de2c-4aba-ac4f-8ac960a80571 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.956416] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4e58dc-3c98-42aa-b85a-2813f0d40811 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.965180] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-5ef9424b-ca8b-4a7d-85ec-b4091c409517 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.973830] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1120.973830] env[69927]: value = "task-4096576" [ 1120.973830] env[69927]: _type = "Task" [ 1120.973830] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.980989] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "d9347f31-b908-4561-9b57-1ea79b762168" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.981307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "d9347f31-b908-4561-9b57-1ea79b762168" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.981558] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "d9347f31-b908-4561-9b57-1ea79b762168-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.981699] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "d9347f31-b908-4561-9b57-1ea79b762168-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.981903] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 
tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "d9347f31-b908-4561-9b57-1ea79b762168-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.988094] env[69927]: INFO nova.compute.manager [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Terminating instance [ 1120.989860] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.044347] env[69927]: DEBUG oslo_concurrency.lockutils [req-ad0f50a6-39a9-4562-a5b5-f4eadc1b5f22 req-067b3a84-e4bd-46a0-9d23-085241e971ac service nova] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.044467] env[69927]: DEBUG oslo_concurrency.lockutils [req-ad0f50a6-39a9-4562-a5b5-f4eadc1b5f22 req-067b3a84-e4bd-46a0-9d23-085241e971ac service nova] Acquired lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.045495] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ced95b-ad49-486a-996f-68b3180ae5d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.065344] env[69927]: DEBUG oslo_concurrency.lockutils [req-ad0f50a6-39a9-4562-a5b5-f4eadc1b5f22 req-067b3a84-e4bd-46a0-9d23-085241e971ac service nova] Releasing lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.065633] env[69927]: WARNING nova.compute.manager [req-ad0f50a6-39a9-4562-a5b5-f4eadc1b5f22 req-067b3a84-e4bd-46a0-9d23-085241e971ac service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Detach interface failed, port_id=5e16c6cd-8c81-4e94-97f0-79a4834e6c6f, reason: No device with interface-id 5e16c6cd-8c81-4e94-97f0-79a4834e6c6f exists on VM: nova.exception.NotFound: No device with interface-id 5e16c6cd-8c81-4e94-97f0-79a4834e6c6f exists on VM [ 1121.081448] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096575, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.128794] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.277029] env[69927]: DEBUG oslo_concurrency.lockutils [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] Releasing lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.277455] env[69927]: DEBUG nova.compute.manager [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Received event network-changed-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.277662] env[69927]: DEBUG nova.compute.manager [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Refreshing instance network info cache due to event network-changed-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1121.278028] env[69927]: DEBUG oslo_concurrency.lockutils [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] Acquiring lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.278215] env[69927]: DEBUG oslo_concurrency.lockutils [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] Acquired lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.278465] env[69927]: DEBUG nova.network.neutron [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Refreshing network info cache for port 7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1121.410416] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.410416] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.410416] env[69927]: DEBUG nova.network.neutron [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 
tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.416535] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.416881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.417077] env[69927]: DEBUG nova.network.neutron [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.485473] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 42%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.494196] env[69927]: DEBUG nova.compute.manager [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1121.494411] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1121.497030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736faac2-e2ff-4b01-b3b1-b40f43a74031 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.506693] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.506889] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-437fe28e-871d-4f38-8ef9-62d30d9a295a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.517368] env[69927]: DEBUG oslo_vmware.api [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1121.517368] env[69927]: value = "task-4096577" [ 1121.517368] env[69927]: _type = "Task" [ 1121.517368] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.526997] env[69927]: DEBUG oslo_vmware.api [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.584763] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096575, 'name': ReconfigVM_Task, 'duration_secs': 1.338275} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.588974] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Reconfigured VM instance instance-0000005c to attach disk [datastore1] b422d5c9-f580-4d07-9d13-af307571bf48/b422d5c9-f580-4d07-9d13-af307571bf48.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.591127] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6fd4651-b27a-45c3-b74d-eea724cf9248 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.600332] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1121.600332] env[69927]: value = "task-4096578" [ 1121.600332] env[69927]: _type = "Task" [ 1121.600332] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.609938] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096578, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.728716] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83085869-c722-481b-9bbd-a1272b524780 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.751501] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance '9aa0a285-66e4-4792-bbe9-a62f76666ec6' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1121.757251] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205dd84d-f927-4f12-bb8e-e6090fa5fb75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.771233] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f329c5-e26a-444c-b9d8-9a82dfa6e941 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.812301] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0217b0a4-9f9f-4918-89ac-b1d26a103f27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.822760] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0dd62c-ba1e-41c5-884b-4fec279049c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.840778] env[69927]: DEBUG nova.compute.provider_tree [None 
req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.985594] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 54%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.031683] env[69927]: DEBUG oslo_vmware.api [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096577, 'name': PowerOffVM_Task, 'duration_secs': 0.295149} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.034815] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.035018] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.035308] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21edcebe-11f2-4651-956a-23753f091686 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.109275] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096578, 'name': Rename_Task, 'duration_secs': 0.220316} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.109572] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.109835] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce0f65c4-0135-4620-9dfb-0b3756ada55a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.115609] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.115886] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.116145] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleting the datastore file [datastore1] d9347f31-b908-4561-9b57-1ea79b762168 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.116423] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87fd4f2b-8010-4d9d-9a7a-6de255a3a200 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.120878] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1122.120878] env[69927]: value = "task-4096580" [ 1122.120878] env[69927]: _type = "Task" [ 1122.120878] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.127537] env[69927]: DEBUG oslo_vmware.api [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for the task: (returnval){ [ 1122.127537] env[69927]: value = "task-4096581" [ 1122.127537] env[69927]: _type = "Task" [ 1122.127537] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.137540] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096580, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.145794] env[69927]: DEBUG oslo_vmware.api [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096581, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.162981] env[69927]: DEBUG nova.network.neutron [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Updated VIF entry in instance network info cache for port 7e7e497c-f90e-44b0-a0dc-e400b4b57c0c. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1122.163602] env[69927]: DEBUG nova.network.neutron [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Updating instance_info_cache with network_info: [{"id": "7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "address": "fa:16:3e:91:44:bb", "network": {"id": "59c5a413-ed97-4651-a37f-e9ed6e46972e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1832571141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fa2f3f0abc7474196dfbee4f8c09d3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e497c-f9", "ovs_interfaceid": "7e7e497c-f90e-44b0-a0dc-e400b4b57c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.260131] env[69927]: INFO nova.network.neutron [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Port 0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1122.261396] env[69927]: DEBUG nova.network.neutron [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [{"id": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "address": "fa:16:3e:2d:be:d8", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31239db7-86", "ovs_interfaceid": "31239db7-86bd-4d24-b54f-414bd1d5a3d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.265021] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.265334] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b12ed619-8ac0-42a5-8750-e4c4dcf56048 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.267793] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.268095] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.268712] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.268712] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.268712] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.270941] env[69927]: INFO nova.compute.manager [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Terminating instance [ 1122.281848] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1122.281848] env[69927]: value = "task-4096582" [ 1122.281848] env[69927]: _type = "Task" [ 1122.281848] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.291044] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096582, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.345185] env[69927]: DEBUG nova.scheduler.client.report [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.450089] env[69927]: DEBUG nova.network.neutron [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.486073] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 65%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.569482] env[69927]: DEBUG nova.compute.manager [req-84becc5c-cab5-4ec4-8592-4c73582d11c3 req-b9f25e6d-cb9f-4bdd-98d3-aa78f5ea0a91 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-vif-deleted-0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1122.635376] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096580, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.641458] env[69927]: DEBUG oslo_vmware.api [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Task: {'id': task-4096581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249001} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.641754] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.641943] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.642134] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.642313] env[69927]: INFO nova.compute.manager [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1122.642564] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1122.642761] env[69927]: DEBUG nova.compute.manager [-] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1122.642855] env[69927]: DEBUG nova.network.neutron [-] [instance: d9347f31-b908-4561-9b57-1ea79b762168] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1122.666867] env[69927]: DEBUG oslo_concurrency.lockutils [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] Releasing lock "refresh_cache-406828cc-c6aa-4686-827d-c7c8e28ffb8e" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.667243] env[69927]: DEBUG nova.compute.manager [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Received event network-vif-deleted-699de5cd-28f8-453d-8f0a-7856f2d6a2b0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1122.668041] env[69927]: INFO nova.compute.manager [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Neutron deleted interface 699de5cd-28f8-453d-8f0a-7856f2d6a2b0; detaching it from the instance and deleting it from the info cache [ 1122.668041] env[69927]: DEBUG nova.network.neutron [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.763597] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-20ac32b7-51fc-40bf-a667-2aeb6c8c7648" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.775601] env[69927]: DEBUG nova.compute.manager [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1122.778606] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.778606] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0d11d7-5272-44da-a3f6-7297b55091c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.795855] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096582, 'name': PowerOffVM_Task, 'duration_secs': 0.462004} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.799558] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.799858] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance '9aa0a285-66e4-4792-bbe9-a62f76666ec6' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1122.805569] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.809565] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6101df5a-d262-4bbc-b637-d658e852e013 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.818759] env[69927]: DEBUG oslo_vmware.api [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1122.818759] env[69927]: value = "task-4096583" [ 1122.818759] env[69927]: _type = "Task" [ 1122.818759] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.832436] env[69927]: DEBUG oslo_vmware.api [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096583, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.853969] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.959s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.858965] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.128s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.865026] env[69927]: INFO nova.compute.claims [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1122.904501] env[69927]: INFO nova.scheduler.client.report [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted allocations for instance cba314de-644e-451e-8ecc-2e209d74bbce [ 1122.954046] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.987775] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 78%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.133257] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096580, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.174383] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-793ff741-8f48-4509-b8ae-360aca5b11bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.184038] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1c86f5-5df1-4a97-8e36-42be7a707d80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.218496] env[69927]: DEBUG nova.compute.manager [req-364b3e9f-dbec-439e-877e-eccfa9e0ec90 req-f31a6724-b6d5-4dc2-a787-f73b3c3a7778 service nova] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Detach interface failed, port_id=699de5cd-28f8-453d-8f0a-7856f2d6a2b0, reason: Instance a2b1684f-82af-42fc-925e-db36f31cfe63 could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1123.271445] env[69927]: DEBUG oslo_concurrency.lockutils [None req-210077ad-405d-4fa0-bfdf-f49176dc11f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-20ac32b7-51fc-40bf-a667-2aeb6c8c7648-5e16c6cd-8c81-4e94-97f0-79a4834e6c6f" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.773s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.310039] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1123.310039] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1123.310039] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1123.310524] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1123.310882] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1123.314031] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1123.314031] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} 
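The nova.virt.hardware records immediately above and below trace CPU-topology selection for a 1-vCPU flavor: with flavor and image limits of 0:0:0 the maxima default to 65536 sockets/cores/threads, exactly one candidate topology is enumerated, and it is sorted first as the desired one. The sketch below is a simplified stand-in for that enumeration, not Nova's actual nova.virt.hardware code; VirtCPUTopology here is a hypothetical namedtuple, not the Nova object.

```python
# Simplified illustration of the "possible topologies" enumeration logged
# above and below; NOT nova.virt.hardware itself. VirtCPUTopology is a stand-in.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, maximum):
    """Yield (sockets, cores, threads) triples whose product equals vcpus
    and which stay within the per-dimension maxima."""
    for sockets in range(1, min(vcpus, maximum.sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, maximum.cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= maximum.threads:
                yield VirtCPUTopology(sockets, cores, threads)

maximum = VirtCPUTopology(65536, 65536, 65536)   # the logged default maxima
candidates = list(possible_topologies(1, maximum))
print(candidates)  # [VirtCPUTopology(sockets=1, cores=1, threads=1)], as logged
```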
[ 1123.314031] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1123.314031] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1123.314031] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1123.314031] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1123.324363] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1996cf58-fd09-47b1-bba1-681ee6487dc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.364287] env[69927]: DEBUG oslo_vmware.api [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.369634] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1123.369634] env[69927]: value = "task-4096585" [ 1123.369634] env[69927]: _type = "Task" [ 1123.369634] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.388565] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096585, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.414109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-367f3af6-9949-4e6d-b80e-fc31b8475702 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "cba314de-644e-451e-8ecc-2e209d74bbce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.804s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.477870] env[69927]: DEBUG nova.compute.manager [req-a8f563c9-eb09-49d1-a231-d87ee138a53e req-a3568e03-8531-4b9a-8a28-1b9937b10632 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Received event network-vif-deleted-c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1123.477870] env[69927]: INFO nova.compute.manager [req-a8f563c9-eb09-49d1-a231-d87ee138a53e req-a3568e03-8531-4b9a-8a28-1b9937b10632 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Neutron deleted interface c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1; detaching it from the instance and deleting it from the info cache [ 1123.477870] env[69927]: DEBUG nova.network.neutron [req-a8f563c9-eb09-49d1-a231-d87ee138a53e req-a3568e03-8531-4b9a-8a28-1b9937b10632 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.488913] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 92%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.636498] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096580, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.737031] env[69927]: DEBUG nova.network.neutron [-] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.834731] env[69927]: DEBUG oslo_vmware.api [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096583, 'name': PowerOffVM_Task, 'duration_secs': 0.909845} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.835091] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1123.836923] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1123.836923] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28891b21-41d5-4546-8d5b-db46c9dea5b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.888235] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096585, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.982057] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8a141a5-f7bd-4beb-91d4-8932b8355e4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.990620] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 97%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.000161] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fe60c1-a1d2-4440-8def-aaa8a4393839 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.041319] env[69927]: DEBUG nova.compute.manager [req-a8f563c9-eb09-49d1-a231-d87ee138a53e req-a3568e03-8531-4b9a-8a28-1b9937b10632 service nova] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Detach interface failed, port_id=c8607f3f-9b4c-4c0f-b6ab-590d290cf0f1, reason: Instance d9347f31-b908-4561-9b57-1ea79b762168 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1124.134852] env[69927]: DEBUG oslo_vmware.api [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096580, 'name': PowerOnVM_Task, 'duration_secs': 1.550921} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.135260] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.135350] env[69927]: INFO nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Took 10.53 seconds to spawn the instance on the hypervisor. [ 1124.135539] env[69927]: DEBUG nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1124.136446] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348335f0-851a-47ae-89fb-15bab6943b04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.148411] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75e9b22-732f-453a-8b39-20345583275c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.157837] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b254edbe-c817-41b6-b50b-886c0380492c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.193813] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92773f9f-b1cd-4ff7-a80b-2617c9d532fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.204725] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ee0249-4ea9-4576-a499-eb4a1ad54f60 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.221662] env[69927]: DEBUG nova.compute.provider_tree [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.240196] env[69927]: INFO nova.compute.manager [-] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Took 1.60 seconds to deallocate network for instance. 
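Alongside the power-on above, the resource tracker re-checks this node's inventory against placement: the provider-tree record just above says nothing changed, and the report.py record a little further down prints the full inventory dict (VCPU total=48, reserved=0, allocation_ratio=4.0; MEMORY_MB total=196590, reserved=512, allocation_ratio=1.0; DISK_GB total=400, reserved=0, allocation_ratio=1.0). Placement derives usable capacity per resource class roughly as (total - reserved) * allocation_ratio; the snippet below is only a quick check of those logged figures, not placement's actual code.

```python
# Quick check of the inventory figures logged by report.py below, using the
# usual placement capacity formula (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")  # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```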
[ 1124.251909] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1124.251909] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1124.252134] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleting the datastore file [datastore2] 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1124.252493] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aaa16b92-f914-479d-bcf6-f2975bbabace {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.260996] env[69927]: DEBUG oslo_vmware.api [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1124.260996] env[69927]: value = "task-4096587" [ 1124.260996] env[69927]: _type = "Task" [ 1124.260996] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.271648] env[69927]: DEBUG oslo_vmware.api [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096587, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.386822] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096585, 'name': ReconfigVM_Task, 'duration_secs': 0.547759} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.387420] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance '9aa0a285-66e4-4792-bbe9-a62f76666ec6' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.476909] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc081d07-5109-472f-88d1-37688d733c6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.491298] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 97%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.507317] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance '4b7934f8-2c97-480b-8af7-f09f6819e2b6' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.660135] env[69927]: INFO nova.compute.manager [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Took 38.31 seconds to build instance. [ 1124.725714] env[69927]: DEBUG nova.scheduler.client.report [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.749144] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.772115] env[69927]: DEBUG oslo_vmware.api [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.358152} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.772412] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1124.772603] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1124.772784] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1124.772969] env[69927]: INFO nova.compute.manager [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Took 2.00 seconds to destroy the instance on the hypervisor. [ 1124.773250] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1124.773493] env[69927]: DEBUG nova.compute.manager [-] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1124.773595] env[69927]: DEBUG nova.network.neutron [-] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1124.855137] env[69927]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 could not be found.", "detail": ""}} {{(pid=69927) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1124.855399] env[69927]: DEBUG nova.network.neutron [-] Unable to show port 0a1c6e4f-540d-4f99-b7a9-06cb3c4271b5 as it no longer exists. 
{{(pid=69927) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1124.897732] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1124.897977] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1124.898205] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1124.898417] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1124.898569] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1124.898718] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1124.898927] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1124.899460] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1124.899686] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 
possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1124.899863] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1124.900055] env[69927]: DEBUG nova.virt.hardware [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1124.910018] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1124.910018] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bc958dc-cfa0-40f4-b15e-b70cc3af7af2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.934480] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1124.934480] env[69927]: value = "task-4096588" [ 1124.934480] env[69927]: _type = "Task" [ 1124.934480] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.950177] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096588, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.991656] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096576, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.015376] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.015781] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eea01dc3-e5c3-421a-ad44-607092ecd9d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.023346] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1125.023346] env[69927]: value = "task-4096589" [ 1125.023346] env[69927]: _type = "Task" [ 1125.023346] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.036051] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1125.036271] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance '4b7934f8-2c97-480b-8af7-f09f6819e2b6' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1125.165281] env[69927]: DEBUG oslo_concurrency.lockutils [None req-15a5c114-0a6c-4978-a12e-d1ec89eac3e7 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b422d5c9-f580-4d07-9d13-af307571bf48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.825s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.188067] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.188367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.231996] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 
tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.232614] env[69927]: DEBUG nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1125.239021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.517s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.239021] env[69927]: DEBUG nova.objects.instance [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lazy-loading 'resources' on Instance uuid 256319c4-817d-4267-8531-a65f0f8cd0b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.449274] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096588, 'name': ReconfigVM_Task, 'duration_secs': 0.179769} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.449274] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1125.449274] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1235a57-4653-42bd-855d-19917746e99d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.475283] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 9aa0a285-66e4-4792-bbe9-a62f76666ec6/9aa0a285-66e4-4792-bbe9-a62f76666ec6.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.476351] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6f30f77-1641-4eab-aa6f-f1a3cd5e25e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.501047] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': 
task-4096576, 'name': RelocateVM_Task, 'duration_secs': 4.128481} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.502450] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1125.502710] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811540', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'name': 'volume-110b2a96-6541-4296-9d43-a5c1d0562ba9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3', 'attached_at': '', 'detached_at': '', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'serial': '110b2a96-6541-4296-9d43-a5c1d0562ba9'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1125.503341] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1125.503341] env[69927]: value = "task-4096590" [ 1125.503341] env[69927]: _type = "Task" [ 1125.503341] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.503963] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d157e5a-4eff-45dc-98b9-5ba7aa033e08 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.517170] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096590, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.529285] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf73b4f9-8aa1-4f4a-abde-253f21eb1fa7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.545736] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.546076] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.546284] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.546511] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.546698] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.546888] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.547156] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.547361] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.547578] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.547776] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.548014] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.561439] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] volume-110b2a96-6541-4296-9d43-a5c1d0562ba9/volume-110b2a96-6541-4296-9d43-a5c1d0562ba9.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.561805] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-786169ce-eb65-425a-b0de-a2f03175c58d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.576020] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7911c3fa-5f27-48a0-a896-650353dfccab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.595555] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1125.595555] env[69927]: value = "task-4096591" [ 1125.595555] env[69927]: _type = "Task" [ 1125.595555] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.597323] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1125.597323] env[69927]: value = "task-4096592" [ 1125.597323] env[69927]: _type = "Task" [ 1125.597323] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.610056] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096592, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.614662] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096591, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.690406] env[69927]: DEBUG nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1125.732361] env[69927]: DEBUG nova.compute.manager [req-2b898453-0b9e-408b-b5a2-1ee4e951404a req-bbd867db-68f8-4f3a-86e3-2a25858d7f41 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Received event network-vif-deleted-31239db7-86bd-4d24-b54f-414bd1d5a3d1 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.732525] env[69927]: INFO nova.compute.manager [req-2b898453-0b9e-408b-b5a2-1ee4e951404a req-bbd867db-68f8-4f3a-86e3-2a25858d7f41 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Neutron deleted interface 31239db7-86bd-4d24-b54f-414bd1d5a3d1; detaching it from the instance and deleting it from the info cache [ 1125.732648] env[69927]: DEBUG nova.network.neutron [req-2b898453-0b9e-408b-b5a2-1ee4e951404a req-bbd867db-68f8-4f3a-86e3-2a25858d7f41 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.748020] env[69927]: DEBUG nova.compute.utils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1125.748020] env[69927]: DEBUG nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1125.748020] env[69927]: DEBUG nova.network.neutron [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1125.864536] env[69927]: DEBUG nova.policy [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbeef6251f004acea30513e40de6c140', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de9e293e2d1a4e179f01f60e882851b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1125.901906] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "b422d5c9-f580-4d07-9d13-af307571bf48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.902232] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b422d5c9-f580-4d07-9d13-af307571bf48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.902461] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "b422d5c9-f580-4d07-9d13-af307571bf48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.902645] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b422d5c9-f580-4d07-9d13-af307571bf48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.902811] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b422d5c9-f580-4d07-9d13-af307571bf48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.906044] env[69927]: INFO nova.compute.manager [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a 
tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Terminating instance [ 1126.026924] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096590, 'name': ReconfigVM_Task, 'duration_secs': 0.320621} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.027395] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 9aa0a285-66e4-4792-bbe9-a62f76666ec6/9aa0a285-66e4-4792-bbe9-a62f76666ec6.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.028096] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance '9aa0a285-66e4-4792-bbe9-a62f76666ec6' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1126.049376] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05878e66-d137-4a75-a0d3-48fa016c06d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.057464] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3d8745-1b59-44e6-aa71-7a9cfffae3cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.091321] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36aa38d-69db-486d-b174-8cc6abf113a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.103209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3957335-3fb9-49da-8533-1150d2be3ecf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.116385] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096592, 'name': ReconfigVM_Task, 'duration_secs': 0.178587} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.127718] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance '4b7934f8-2c97-480b-8af7-f09f6819e2b6' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1126.131797] env[69927]: DEBUG nova.compute.provider_tree [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.133202] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096591, 'name': ReconfigVM_Task, 'duration_secs': 0.486053} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.133950] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Reconfigured VM instance instance-0000005d to attach disk [datastore1] volume-110b2a96-6541-4296-9d43-a5c1d0562ba9/volume-110b2a96-6541-4296-9d43-a5c1d0562ba9.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.139127] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-552df458-a692-4282-b26c-90623e7134ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.156518] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1126.156518] env[69927]: value = "task-4096593" [ 1126.156518] env[69927]: _type = "Task" [ 1126.156518] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.160788] env[69927]: DEBUG nova.network.neutron [-] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.169045] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096593, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.222369] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.235148] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c778b03a-bc95-46f2-b061-6d572124b916 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.246550] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cc15cc-c3e3-4513-897a-f5339b6b80d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.262051] env[69927]: DEBUG nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1126.287160] env[69927]: DEBUG nova.compute.manager [req-2b898453-0b9e-408b-b5a2-1ee4e951404a req-bbd867db-68f8-4f3a-86e3-2a25858d7f41 service nova] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Detach interface failed, port_id=31239db7-86bd-4d24-b54f-414bd1d5a3d1, reason: Instance 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1126.415929] env[69927]: DEBUG nova.compute.manager [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1126.416202] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1126.417105] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf5b5da-fb98-4b77-9251-42e1edbd1aea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.421816] env[69927]: DEBUG nova.network.neutron [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Successfully created port: 30a26167-3dd4-4729-be64-03c251eaaa48 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1126.426251] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1126.426512] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fce70ffb-116f-4c8c-ba1f-57b70ee3afc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.433130] env[69927]: DEBUG oslo_vmware.api [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1126.433130] env[69927]: value = "task-4096594" [ 1126.433130] env[69927]: _type = "Task" [ 1126.433130] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.444550] env[69927]: DEBUG oslo_vmware.api [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096594, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.538767] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8447a273-5c00-4ea4-a16e-78a73a477b9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.562894] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d8ca93-7e9b-4194-aa00-7959729fe357 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.582952] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance '9aa0a285-66e4-4792-bbe9-a62f76666ec6' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1126.637214] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1126.637492] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.637722] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1126.637997] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.638280] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1126.638537] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1126.638872] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1126.639132] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1126.639423] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1126.639673] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1126.639938] env[69927]: DEBUG nova.virt.hardware [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1126.649071] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Reconfiguring VM instance instance-0000004a to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1126.650808] env[69927]: DEBUG nova.scheduler.client.report [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.655655] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c71da593-c3f9-428a-9042-45e291326cee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.679520] env[69927]: INFO nova.compute.manager [-] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Took 1.91 seconds to deallocate network for instance. 
[ 1126.695936] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1126.695936] env[69927]: value = "task-4096595" [ 1126.695936] env[69927]: _type = "Task" [ 1126.695936] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.707212] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096593, 'name': ReconfigVM_Task, 'duration_secs': 0.157608} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.708490] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811540', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'name': 'volume-110b2a96-6541-4296-9d43-a5c1d0562ba9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3', 'attached_at': '', 'detached_at': '', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'serial': '110b2a96-6541-4296-9d43-a5c1d0562ba9'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1126.709391] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac3cec83-8aec-4ccd-891b-5dc572abdce5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.719063] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096595, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.726212] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1126.726212] env[69927]: value = "task-4096596" [ 1126.726212] env[69927]: _type = "Task" [ 1126.726212] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.743306] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096596, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.944544] env[69927]: DEBUG oslo_vmware.api [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096594, 'name': PowerOffVM_Task, 'duration_secs': 0.404567} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.944871] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1126.944871] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1126.946420] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61b6bd36-a53d-4f5a-979d-7af0a60cd0c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.046109] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1127.046495] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1127.046816] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleting the datastore file [datastore1] b422d5c9-f580-4d07-9d13-af307571bf48 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1127.047221] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94f17c9f-9c55-4d45-a951-0fe8faa10d7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.058168] env[69927]: DEBUG oslo_vmware.api [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1127.058168] env[69927]: value = "task-4096598" [ 1127.058168] env[69927]: _type = "Task" [ 1127.058168] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.068069] env[69927]: DEBUG oslo_vmware.api [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096598, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.169451] env[69927]: DEBUG nova.network.neutron [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Port 216a398c-956e-4115-ae6d-b045d946831b binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1127.182204] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.188355] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.447s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.188355] env[69927]: INFO nova.compute.claims [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.195270] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.212805] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096595, 'name': ReconfigVM_Task, 'duration_secs': 0.265903} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.214592] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Reconfigured VM instance instance-0000004a to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1127.214592] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa34eb4-bd77-40eb-b37d-1e2c995b2ed6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.242785] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 4b7934f8-2c97-480b-8af7-f09f6819e2b6/4b7934f8-2c97-480b-8af7-f09f6819e2b6.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1127.245379] env[69927]: INFO nova.scheduler.client.report [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Deleted allocations for instance 256319c4-817d-4267-8531-a65f0f8cd0b6 [ 1127.253372] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9233cdfd-d2d0-43a1-bbc5-4cefb83efbaa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.284202] env[69927]: DEBUG nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1127.297601] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096596, 'name': Rename_Task, 'duration_secs': 0.171912} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.300692] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1127.301325] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1127.301325] env[69927]: value = "task-4096599" [ 1127.301325] env[69927]: _type = "Task" [ 1127.301325] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.301648] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a528ee2-32ea-41ea-bc6d-b069918c6be4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.316722] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096599, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.321090] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1127.321090] env[69927]: value = "task-4096600" [ 1127.321090] env[69927]: _type = "Task" [ 1127.321090] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.329501] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1127.329872] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.329995] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1127.330254] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.330348] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1127.330442] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1127.330678] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1127.330871] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1127.331036] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1127.331536] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1127.331536] env[69927]: DEBUG nova.virt.hardware [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1127.332339] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e7059f-e1f0-4615-afd2-c23c725065f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.338962] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.345467] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4418bb48-3c27-42ba-9a72-60f6ab918474 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.570949] env[69927]: DEBUG oslo_vmware.api [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.362448} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.571281] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1127.571469] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1127.571649] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1127.571870] env[69927]: INFO nova.compute.manager [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1127.572184] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1127.572513] env[69927]: DEBUG nova.compute.manager [-] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1127.572614] env[69927]: DEBUG nova.network.neutron [-] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1127.796369] env[69927]: DEBUG oslo_concurrency.lockutils [None req-18eef147-b91c-41c9-b3b3-6690cf75cf70 tempest-ServersAdminTestJSON-859816448 tempest-ServersAdminTestJSON-859816448-project-member] Lock "256319c4-817d-4267-8531-a65f0f8cd0b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.752s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.817311] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096599, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.835861] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096600, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.146926] env[69927]: DEBUG nova.compute.manager [req-74b8cf2b-a2ec-4aaa-9ef1-c5b4add4f200 req-b1d4be9c-2d59-4b58-ba3f-2ee44988d7ec service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Received event network-vif-deleted-b15acf58-fac9-4e30-aaf1-9b212850bf3e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1128.147218] env[69927]: INFO nova.compute.manager [req-74b8cf2b-a2ec-4aaa-9ef1-c5b4add4f200 req-b1d4be9c-2d59-4b58-ba3f-2ee44988d7ec service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Neutron deleted interface b15acf58-fac9-4e30-aaf1-9b212850bf3e; detaching it from the instance and deleting it from the info cache [ 1128.147623] env[69927]: DEBUG nova.network.neutron [req-74b8cf2b-a2ec-4aaa-9ef1-c5b4add4f200 req-b1d4be9c-2d59-4b58-ba3f-2ee44988d7ec service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.202250] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.202329] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.202462] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.318491] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096599, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.337135] env[69927]: DEBUG oslo_vmware.api [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096600, 'name': PowerOnVM_Task, 'duration_secs': 0.79839} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.337135] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1128.337135] env[69927]: INFO nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Took 10.46 seconds to spawn the instance on the hypervisor. [ 1128.337135] env[69927]: DEBUG nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1128.338433] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e3cc0c-cd4c-4eb2-86e6-d01006196ebb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.504924] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cd87e6-2bd7-4883-85c7-5370be53fdb0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.517770] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b27300-be96-49a1-b822-3efe669aa91e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.555638] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291b9d0f-7d35-4f29-b8f9-850ab3711139 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.565257] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc46c42-2262-478f-94f1-f6fc2b18d543 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.589629] env[69927]: DEBUG nova.compute.provider_tree [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.600806] env[69927]: DEBUG nova.network.neutron [-] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.633791] env[69927]: DEBUG nova.compute.manager [req-e9496150-b1a1-40f1-9633-635fc20ed5c5 req-5e78d089-cbac-4d7c-8004-3a47a9784522 service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Received event network-vif-plugged-30a26167-3dd4-4729-be64-03c251eaaa48 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1128.633969] env[69927]: DEBUG oslo_concurrency.lockutils 
[req-e9496150-b1a1-40f1-9633-635fc20ed5c5 req-5e78d089-cbac-4d7c-8004-3a47a9784522 service nova] Acquiring lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.634190] env[69927]: DEBUG oslo_concurrency.lockutils [req-e9496150-b1a1-40f1-9633-635fc20ed5c5 req-5e78d089-cbac-4d7c-8004-3a47a9784522 service nova] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.634356] env[69927]: DEBUG oslo_concurrency.lockutils [req-e9496150-b1a1-40f1-9633-635fc20ed5c5 req-5e78d089-cbac-4d7c-8004-3a47a9784522 service nova] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.634519] env[69927]: DEBUG nova.compute.manager [req-e9496150-b1a1-40f1-9633-635fc20ed5c5 req-5e78d089-cbac-4d7c-8004-3a47a9784522 service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] No waiting events found dispatching network-vif-plugged-30a26167-3dd4-4729-be64-03c251eaaa48 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1128.634678] env[69927]: WARNING nova.compute.manager [req-e9496150-b1a1-40f1-9633-635fc20ed5c5 req-5e78d089-cbac-4d7c-8004-3a47a9784522 service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Received unexpected event network-vif-plugged-30a26167-3dd4-4729-be64-03c251eaaa48 for instance with vm_state building and task_state spawning. [ 1128.650144] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-561b6a7f-23a0-428b-b9b9-81f7908e9ee1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.662779] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f327bde-1ed1-46dd-8d98-d0d44e6e8bb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.697233] env[69927]: DEBUG nova.compute.manager [req-74b8cf2b-a2ec-4aaa-9ef1-c5b4add4f200 req-b1d4be9c-2d59-4b58-ba3f-2ee44988d7ec service nova] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Detach interface failed, port_id=b15acf58-fac9-4e30-aaf1-9b212850bf3e, reason: Instance b422d5c9-f580-4d07-9d13-af307571bf48 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1128.722601] env[69927]: DEBUG nova.network.neutron [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Successfully updated port: 30a26167-3dd4-4729-be64-03c251eaaa48 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.817066] env[69927]: DEBUG oslo_vmware.api [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096599, 'name': ReconfigVM_Task, 'duration_secs': 1.423702} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.817306] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 4b7934f8-2c97-480b-8af7-f09f6819e2b6/4b7934f8-2c97-480b-8af7-f09f6819e2b6.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1128.817586] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance '4b7934f8-2c97-480b-8af7-f09f6819e2b6' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1128.862460] env[69927]: INFO nova.compute.manager [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Took 33.86 seconds to build instance. [ 1129.094159] env[69927]: DEBUG nova.scheduler.client.report [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.104744] env[69927]: INFO nova.compute.manager [-] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Took 1.53 seconds to deallocate network for instance. 
[ 1129.227618] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.227885] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.227972] env[69927]: DEBUG nova.network.neutron [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1129.256546] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.256722] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.256893] env[69927]: DEBUG nova.network.neutron [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1129.325992] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7625cdb-9e7e-42da-aca5-ed63b752d29b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.351187] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af63007b-100b-446a-96c2-4468717f9856 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.377532] env[69927]: DEBUG oslo_concurrency.lockutils [None req-343ad5c2-3b4a-4efc-b052-31623b0fe7b4 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.386s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.378088] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] 
[instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance '4b7934f8-2c97-480b-8af7-f09f6819e2b6' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1129.599944] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.600831] env[69927]: DEBUG nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1129.605560] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.788s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.608179] env[69927]: INFO nova.compute.claims [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1129.616026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.777038] env[69927]: DEBUG nova.network.neutron [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1129.956730] env[69927]: DEBUG nova.network.neutron [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Port af2d617c-7a43-466f-b19d-3cce0c52c836 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1129.959112] env[69927]: DEBUG nova.network.neutron [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Updating instance_info_cache with network_info: [{"id": "30a26167-3dd4-4729-be64-03c251eaaa48", "address": "fa:16:3e:0e:18:44", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a26167-3d", "ovs_interfaceid": "30a26167-3dd4-4729-be64-03c251eaaa48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.117023] env[69927]: DEBUG nova.compute.utils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1130.118854] env[69927]: DEBUG nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1130.119064] env[69927]: DEBUG nova.network.neutron [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1130.127794] env[69927]: DEBUG nova.network.neutron [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance_info_cache with network_info: [{"id": "216a398c-956e-4115-ae6d-b045d946831b", "address": "fa:16:3e:84:a5:05", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216a398c-95", "ovs_interfaceid": "216a398c-956e-4115-ae6d-b045d946831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.181667] env[69927]: DEBUG nova.compute.manager [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Received event network-changed-358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1130.181667] env[69927]: DEBUG nova.compute.manager [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Refreshing instance network info cache due to event network-changed-358ca610-8c4b-4e27-99df-97c37c69074f. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1130.181667] env[69927]: DEBUG oslo_concurrency.lockutils [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] Acquiring lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.181667] env[69927]: DEBUG oslo_concurrency.lockutils [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] Acquired lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.182779] env[69927]: DEBUG nova.network.neutron [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Refreshing network info cache for port 358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.201192] env[69927]: DEBUG nova.policy [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '181ec10b2e4b4f1794294d18313a5918', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a47794e5824701925ad4bdc3651196', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1130.466481] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.466786] env[69927]: DEBUG nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Instance network_info: |[{"id": "30a26167-3dd4-4729-be64-03c251eaaa48", "address": "fa:16:3e:0e:18:44", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a26167-3d", "ovs_interfaceid": "30a26167-3dd4-4729-be64-03c251eaaa48", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1130.467218] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:18:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30a26167-3dd4-4729-be64-03c251eaaa48', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1130.483703] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating folder: Project (de9e293e2d1a4e179f01f60e882851b8). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1130.484499] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2025d13-ba8a-470d-9164-e4ddced569c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.499659] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created folder: Project (de9e293e2d1a4e179f01f60e882851b8) in parent group-v811283. [ 1130.499659] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating folder: Instances. Parent ref: group-v811555. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1130.500345] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15f16c9d-978b-4fcf-ab61-38a8335e0bc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.512382] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created folder: Instances in parent group-v811555. [ 1130.512656] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.512866] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1130.513106] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2264695-51a1-43a3-8b48-71f6c29bf207 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.546593] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1130.546593] env[69927]: value = "task-4096603" [ 1130.546593] env[69927]: _type = "Task" [ 1130.546593] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.557929] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096603, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.619841] env[69927]: DEBUG nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1130.636754] env[69927]: DEBUG oslo_concurrency.lockutils [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.741773] env[69927]: DEBUG nova.compute.manager [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Received event network-changed-30a26167-3dd4-4729-be64-03c251eaaa48 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1130.742111] env[69927]: DEBUG nova.compute.manager [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Refreshing instance network info cache due to event network-changed-30a26167-3dd4-4729-be64-03c251eaaa48. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1130.742406] env[69927]: DEBUG oslo_concurrency.lockutils [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] Acquiring lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.742697] env[69927]: DEBUG oslo_concurrency.lockutils [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] Acquired lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.743191] env[69927]: DEBUG nova.network.neutron [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Refreshing network info cache for port 30a26167-3dd4-4729-be64-03c251eaaa48 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.839764] env[69927]: DEBUG nova.network.neutron [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Successfully created port: 9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1130.950513] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62107b7b-7215-404d-82b7-30526152ece4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.960191] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870ff15f-41e0-4511-a301-4a73a756c2cb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.010870] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358f29c3-2bf7-4118-801d-8b0b38c518d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.022042] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.022533] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.022835] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.031022] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f46a623-b315-42b8-95ca-5698865c8b9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.049115] env[69927]: DEBUG nova.compute.provider_tree [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.060756] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096603, 'name': CreateVM_Task, 'duration_secs': 0.339265} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.061437] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1131.062279] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.062565] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.063549] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1131.063803] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01926ea4-ef7a-45cf-995a-13cc2bf88b51 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.070100] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1131.070100] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52209dea-ec08-b58a-44c4-0225081d90a2" [ 1131.070100] env[69927]: _type = "Task" [ 1131.070100] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.082695] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52209dea-ec08-b58a-44c4-0225081d90a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.131910] env[69927]: DEBUG nova.network.neutron [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Updated VIF entry in instance network info cache for port 358ca610-8c4b-4e27-99df-97c37c69074f. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1131.132371] env[69927]: DEBUG nova.network.neutron [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Updating instance_info_cache with network_info: [{"id": "358ca610-8c4b-4e27-99df-97c37c69074f", "address": "fa:16:3e:b4:c9:1a", "network": {"id": "ef0015d9-87ee-49aa-8291-f5ca7af139c2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1050387659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f00841a7dc4ae9b184f19e5f847095", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358ca610-8c", "ovs_interfaceid": "358ca610-8c4b-4e27-99df-97c37c69074f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.172329] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25917e3-e671-486e-a923-e7b65203278b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.194254] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af77a06-2426-4845-a33b-7b1999d6f189 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.203736] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance '9aa0a285-66e4-4792-bbe9-a62f76666ec6' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1131.556701] env[69927]: DEBUG 
nova.scheduler.client.report [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.585203] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52209dea-ec08-b58a-44c4-0225081d90a2, 'name': SearchDatastore_Task, 'duration_secs': 0.012875} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.588259] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.588804] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1131.589126] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.589325] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.589540] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1131.590324] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95bf2e20-a79c-4134-b34e-dd83404689bf {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.600783] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1131.600976] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1131.601716] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acb5f460-b073-4717-aee3-3061a40b8c82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.609040] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1131.609040] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528a9600-092c-5000-4e8f-7d9a93f4d1fc" [ 1131.609040] env[69927]: _type = "Task" [ 1131.609040] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.619239] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528a9600-092c-5000-4e8f-7d9a93f4d1fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.630948] env[69927]: DEBUG nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1131.638307] env[69927]: DEBUG oslo_concurrency.lockutils [req-3830323f-788c-4ef1-aebf-6bb628836c60 req-32498834-f1b3-458c-b0c3-e1c92030a7c3 service nova] Releasing lock "refresh_cache-a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.666620] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1131.666921] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1131.667203] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1131.667441] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1131.667648] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1131.667790] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1131.668065] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1131.668274] env[69927]: 
DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1131.668476] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1131.668672] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1131.668876] env[69927]: DEBUG nova.virt.hardware [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1131.669770] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1699e05c-ba32-4d44-abea-b808940a63fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.678612] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dd1fac-0536-4858-bd1b-aad33293eedf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.711407] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.713032] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22cc6199-f826-442a-a6de-d1ce3621fee8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.721317] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1131.721317] env[69927]: value = "task-4096604" [ 1131.721317] env[69927]: _type = "Task" [ 1131.721317] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.730371] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096604, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.775984] env[69927]: DEBUG nova.network.neutron [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Updated VIF entry in instance network info cache for port 30a26167-3dd4-4729-be64-03c251eaaa48. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1131.776420] env[69927]: DEBUG nova.network.neutron [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Updating instance_info_cache with network_info: [{"id": "30a26167-3dd4-4729-be64-03c251eaaa48", "address": "fa:16:3e:0e:18:44", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a26167-3d", "ovs_interfaceid": "30a26167-3dd4-4729-be64-03c251eaaa48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.062997] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.063647] env[69927]: DEBUG nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1132.067569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.067894] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.068147] env[69927]: DEBUG nova.network.neutron [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.069372] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.941s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.069531] env[69927]: DEBUG nova.objects.instance [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'resources' on Instance uuid a2b1684f-82af-42fc-925e-db36f31cfe63 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.119367] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528a9600-092c-5000-4e8f-7d9a93f4d1fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.120237] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7d4be15-7981-412b-ba4d-1f41f2db33e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.128170] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1132.128170] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527f56d6-ed93-eb1c-894c-5ef4dd762746" [ 1132.128170] env[69927]: _type = "Task" [ 1132.128170] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.137740] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527f56d6-ed93-eb1c-894c-5ef4dd762746, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.233239] env[69927]: DEBUG oslo_vmware.api [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096604, 'name': PowerOnVM_Task, 'duration_secs': 0.416312} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.233995] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.233995] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-acd30405-bc10-4eec-9882-be87fb13fb94 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance '9aa0a285-66e4-4792-bbe9-a62f76666ec6' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1132.282735] env[69927]: DEBUG oslo_concurrency.lockutils [req-26fbded6-abc5-4953-a94e-35be23c05cfb req-ffbe2dfb-3610-4a31-83b9-c6ef027f662b service nova] Releasing lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.528995] env[69927]: DEBUG nova.network.neutron [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Successfully updated port: 9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1132.538225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.538513] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.538692] env[69927]: INFO nova.compute.manager [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 
tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Shelving [ 1132.575772] env[69927]: DEBUG nova.compute.utils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1132.581529] env[69927]: DEBUG nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1132.581604] env[69927]: DEBUG nova.network.neutron [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1132.644338] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527f56d6-ed93-eb1c-894c-5ef4dd762746, 'name': SearchDatastore_Task, 'duration_secs': 0.011063} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.644608] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.644873] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/15c44d86-829f-4317-ab66-9e61d4fb4dd0.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1132.645157] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e1914ee-0231-488f-a742-0313b48fd4c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.649233] env[69927]: DEBUG nova.policy [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbeef6251f004acea30513e40de6c140', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de9e293e2d1a4e179f01f60e882851b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1132.664018] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1132.664018] env[69927]: value = "task-4096605" [ 1132.664018] env[69927]: _type = "Task" [ 1132.664018] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.676011] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096605, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.786671] env[69927]: DEBUG nova.compute.manager [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Received event network-vif-plugged-9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.786919] env[69927]: DEBUG oslo_concurrency.lockutils [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] Acquiring lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.787163] env[69927]: DEBUG oslo_concurrency.lockutils [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.787339] env[69927]: DEBUG oslo_concurrency.lockutils [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.787754] env[69927]: DEBUG nova.compute.manager [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] No waiting events found dispatching network-vif-plugged-9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1132.788860] env[69927]: WARNING nova.compute.manager [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Received unexpected event network-vif-plugged-9251f381-f500-4fb3-a407-0020e12af1ec for instance with vm_state building and task_state spawning. 
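The lock traffic above ('Acquiring lock "refresh_cache-<uuid>"', 'Lock "<uuid>-events" acquired by ... :: waited 0.000s', '... "released" by ... :: held 0.000s') all comes from oslo.concurrency. As an illustrative aside (not Nova's actual code), the sketch below shows the two lockutils idioms that produce these lines; the lock names are copied from the entries above and the function bodies are placeholders.

```python
# Illustrative sketch only -- the oslo.concurrency idioms behind the
# "Acquiring/Acquired/Releasing lock" and "acquired by ... waited / released ... held"
# DEBUG lines in this log. Lock names are taken from the entries above;
# the function bodies are placeholders, not Nova code.
from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid):
    # Context-manager form: logs Acquiring/Acquired/Releasing
    # (the lockutils.py:313/316/334 lines above).
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network-info cache here

# Decorator form: logs 'Lock "..." acquired by "<function>" :: waited Ns' and
# '"released" by "<function>" :: held Ns' (the lockutils.py:405/410/424 lines above).
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # resource-tracker accounting would go here
```

In these entries the waited/held timings appear only on the decorator form, which is why the long wait on "compute_resources" (12.788s earlier in this section) is reported that way.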
[ 1132.788860] env[69927]: DEBUG nova.compute.manager [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Received event network-changed-9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.788860] env[69927]: DEBUG nova.compute.manager [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Refreshing instance network info cache due to event network-changed-9251f381-f500-4fb3-a407-0020e12af1ec. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1132.788998] env[69927]: DEBUG oslo_concurrency.lockutils [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] Acquiring lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.789587] env[69927]: DEBUG oslo_concurrency.lockutils [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] Acquired lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.789587] env[69927]: DEBUG nova.network.neutron [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Refreshing network info cache for port 9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1132.867124] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c71fc42-08f8-497d-ad9c-5d7e82d57132 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.877666] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4a0e0c-6f05-4683-a960-e41803dfbd99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.912270] env[69927]: DEBUG nova.network.neutron [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": 
"nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.913961] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717c3e7c-eef1-4b1c-b667-160f3e689662 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.923955] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b9b6fb-1f5d-4cde-8f58-2d89f1ed063d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.946957] env[69927]: DEBUG nova.compute.provider_tree [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.971084] env[69927]: DEBUG nova.network.neutron [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Successfully created port: 619d4a4d-a555-4b6b-a00b-87d1eb721427 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1133.031160] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.082638] env[69927]: DEBUG nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1133.179369] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480174} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.179666] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/15c44d86-829f-4317-ab66-9e61d4fb4dd0.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1133.179901] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1133.180430] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2958d267-046c-4bac-b680-476c00b9ccb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.188440] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1133.188440] env[69927]: value = "task-4096606" [ 1133.188440] env[69927]: _type = "Task" [ 1133.188440] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.197374] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096606, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.327991] env[69927]: DEBUG nova.network.neutron [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1133.418660] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.452100] env[69927]: DEBUG nova.scheduler.client.report [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.459418] env[69927]: DEBUG nova.network.neutron [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.550030] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1133.550474] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8df8dd38-8a40-4d53-abc3-2a952f5bea2b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.560169] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1133.560169] env[69927]: value = "task-4096607" [ 1133.560169] env[69927]: _type = "Task" [ 1133.560169] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.570432] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096607, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.701737] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067752} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.702227] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1133.702909] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e5ecfa-574a-49b8-9585-fa605bdf1261 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.726245] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/15c44d86-829f-4317-ab66-9e61d4fb4dd0.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1133.726575] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-511a22a8-ab98-4535-83da-bb91725c3849 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.748019] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1133.748019] env[69927]: value = "task-4096608" [ 1133.748019] env[69927]: _type = "Task" [ 1133.748019] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.768307] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096608, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.947380] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756b91b0-1b1d-42d3-8fe2-c82a6a49a656 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.968356] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.899s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.971024] env[69927]: DEBUG oslo_concurrency.lockutils [req-8419e527-826f-4d82-8aac-4e516bc488c2 req-4943f4af-3741-4f58-bf67-f0ef0b2f96c7 service nova] Releasing lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.971757] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.223s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.971992] env[69927]: DEBUG nova.objects.instance [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lazy-loading 'resources' on Instance uuid d9347f31-b908-4561-9b57-1ea79b762168 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.973051] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.977025] env[69927]: DEBUG nova.network.neutron [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1133.977025] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0baa2cac-3a88-494f-83b7-268a9cf9dbe1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.984781] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance '4b7934f8-2c97-480b-8af7-f09f6819e2b6' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1133.993371] env[69927]: INFO nova.scheduler.client.report [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted 
allocations for instance a2b1684f-82af-42fc-925e-db36f31cfe63 [ 1134.071226] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096607, 'name': PowerOffVM_Task, 'duration_secs': 0.239935} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.071543] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1134.072630] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fd9453-22d4-4cad-bd0b-decb25e45fbb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.092652] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf57ac4-895d-4bc9-b37d-e3d3df1ac519 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.096675] env[69927]: DEBUG nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1134.139254] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=<?>,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-05-13T19:34:39Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1134.140783] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1134.140783] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1134.140783] env[69927]: DEBUG nova.virt.hardware 
[None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1134.140783] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1134.141063] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1134.141405] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1134.141679] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1134.141965] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1134.142289] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1134.142594] env[69927]: DEBUG nova.virt.hardware [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1134.143957] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db56d1e-88db-4ea5-8bf5-777834c26a18 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.158207] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10352307-8523-4531-9e25-6bf953da0604 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.261494] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: 
{'id': task-4096608, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.388027] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.388027] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.388027] env[69927]: DEBUG nova.compute.manager [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Going to confirm migration 4 {{(pid=69927) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1134.475983] env[69927]: DEBUG nova.compute.manager [req-93b0d2d7-fd4c-43ab-b9f4-af4a11d11d0e req-35096024-1770-4b6d-a806-b15907d9b848 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Received event network-vif-plugged-619d4a4d-a555-4b6b-a00b-87d1eb721427 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1134.476805] env[69927]: DEBUG oslo_concurrency.lockutils [req-93b0d2d7-fd4c-43ab-b9f4-af4a11d11d0e req-35096024-1770-4b6d-a806-b15907d9b848 service nova] Acquiring lock "cff307ed-3c8b-4126-9749-1204597cbf6c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.477191] env[69927]: DEBUG oslo_concurrency.lockutils [req-93b0d2d7-fd4c-43ab-b9f4-af4a11d11d0e req-35096024-1770-4b6d-a806-b15907d9b848 service nova] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.477483] env[69927]: DEBUG oslo_concurrency.lockutils [req-93b0d2d7-fd4c-43ab-b9f4-af4a11d11d0e req-35096024-1770-4b6d-a806-b15907d9b848 service nova] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.477759] env[69927]: DEBUG nova.compute.manager [req-93b0d2d7-fd4c-43ab-b9f4-af4a11d11d0e req-35096024-1770-4b6d-a806-b15907d9b848 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] No waiting events found dispatching network-vif-plugged-619d4a4d-a555-4b6b-a00b-87d1eb721427 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1134.478100] env[69927]: WARNING nova.compute.manager [req-93b0d2d7-fd4c-43ab-b9f4-af4a11d11d0e req-35096024-1770-4b6d-a806-b15907d9b848 service nova] [instance: 
cff307ed-3c8b-4126-9749-1204597cbf6c] Received unexpected event network-vif-plugged-619d4a4d-a555-4b6b-a00b-87d1eb721427 for instance with vm_state building and task_state spawning. [ 1134.492239] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee8af17-63e4-40e0-9004-87be719c8f37 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance '4b7934f8-2c97-480b-8af7-f09f6819e2b6' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1134.504331] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bea9fedc-ee35-4b9f-9e54-b1490a8ad76a tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "a2b1684f-82af-42fc-925e-db36f31cfe63" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 17.808s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.599809] env[69927]: DEBUG nova.network.neutron [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Successfully updated port: 619d4a4d-a555-4b6b-a00b-87d1eb721427 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1134.605213] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1134.605530] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-079018b5-698d-47b1-9818-39e2ce8f2d68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.617271] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1134.617271] env[69927]: value = "task-4096609" [ 1134.617271] env[69927]: _type = "Task" [ 1134.617271] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.631866] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096609, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.732469] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5c51e7-f50d-4ddc-a071-b5ec1c5a6837 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.742419] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be388407-c9ce-4fea-b2bf-23382d102753 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.785824] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0319d84d-e0f4-42fb-91ef-ca4b4f7b685e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.792623] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096608, 'name': ReconfigVM_Task, 'duration_secs': 0.736994} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.793410] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/15c44d86-829f-4317-ab66-9e61d4fb4dd0.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1134.794093] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-678a119a-c1a5-41e7-92ae-de3baf19ef96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.799980] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a948052e-4de8-4634-8221-7a02467c0f0a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.808974] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1134.808974] env[69927]: value = "task-4096610" [ 1134.808974] env[69927]: _type = "Task" [ 1134.808974] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.822591] env[69927]: DEBUG nova.compute.provider_tree [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.830935] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096610, 'name': Rename_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.834163] env[69927]: DEBUG nova.network.neutron [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1134.954364] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.954611] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.954850] env[69927]: DEBUG nova.network.neutron [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.955090] env[69927]: DEBUG nova.objects.instance [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lazy-loading 'info_cache' on Instance uuid 9aa0a285-66e4-4792-bbe9-a62f76666ec6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.103453] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.103598] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.103756] env[69927]: DEBUG nova.network.neutron [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1135.133994] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096609, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.324636] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096610, 'name': Rename_Task, 'duration_secs': 0.188194} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.325026] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.325437] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46646ff7-9d2e-46e7-8d3b-6bf42b10065d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.329112] env[69927]: DEBUG nova.scheduler.client.report [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1135.343071] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1135.343071] env[69927]: value = "task-4096611" [ 1135.343071] env[69927]: _type = "Task" [ 1135.343071] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.350169] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096611, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.370295] env[69927]: DEBUG nova.network.neutron [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Updating instance_info_cache with network_info: [{"id": "9251f381-f500-4fb3-a407-0020e12af1ec", "address": "fa:16:3e:ab:22:93", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9251f381-f5", "ovs_interfaceid": "9251f381-f500-4fb3-a407-0020e12af1ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.629756] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096609, 'name': CreateSnapshot_Task, 'duration_secs': 0.903555} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.630093] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1135.630910] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea814270-eb82-422b-82c7-268679cffaf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.648286] env[69927]: DEBUG nova.network.neutron [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1135.836380] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.864s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.843025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.618s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.843025] env[69927]: INFO nova.compute.claims [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.865749] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096611, 'name': PowerOnVM_Task} progress is 81%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.876359] env[69927]: INFO nova.scheduler.client.report [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Deleted allocations for instance d9347f31-b908-4561-9b57-1ea79b762168 [ 1135.881717] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.881775] env[69927]: DEBUG nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Instance network_info: |[{"id": "9251f381-f500-4fb3-a407-0020e12af1ec", "address": "fa:16:3e:ab:22:93", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9251f381-f5", "ovs_interfaceid": 
"9251f381-f500-4fb3-a407-0020e12af1ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1135.882733] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:22:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7b7edd0-124a-48ec-ae26-1aa14f9b884a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9251f381-f500-4fb3-a407-0020e12af1ec', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.891519] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1135.894884] env[69927]: DEBUG nova.network.neutron [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Updating instance_info_cache with network_info: [{"id": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "address": "fa:16:3e:91:11:b1", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap619d4a4d-a5", "ovs_interfaceid": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.894884] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.894884] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66d19fee-5c09-424e-aa52-0e4ab7bdb218 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.920800] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.920800] env[69927]: value = 
"task-4096612" [ 1135.920800] env[69927]: _type = "Task" [ 1135.920800] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.933567] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096612, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.152020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1136.155553] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-69307a66-2f8b-4909-b327-aa2004cf2ed6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.167041] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1136.167041] env[69927]: value = "task-4096613" [ 1136.167041] env[69927]: _type = "Task" [ 1136.167041] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.178921] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096613, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.365752] env[69927]: DEBUG oslo_vmware.api [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096611, 'name': PowerOnVM_Task, 'duration_secs': 0.868769} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.365983] env[69927]: DEBUG nova.network.neutron [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance_info_cache with network_info: [{"id": "216a398c-956e-4115-ae6d-b045d946831b", "address": "fa:16:3e:84:a5:05", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap216a398c-95", "ovs_interfaceid": "216a398c-956e-4115-ae6d-b045d946831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.368268] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.368545] env[69927]: INFO nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Took 9.08 seconds to spawn the instance on the hypervisor. 
[ 1136.368783] env[69927]: DEBUG nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1136.370545] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036b8926-bab6-422b-9121-a0b1e972cb4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.393550] env[69927]: DEBUG oslo_concurrency.lockutils [None req-0a2a6d5c-eb2f-419b-a27f-3714aabb8fb6 tempest-ImagesTestJSON-210893687 tempest-ImagesTestJSON-210893687-project-member] Lock "d9347f31-b908-4561-9b57-1ea79b762168" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 15.411s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.414732] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.415122] env[69927]: DEBUG nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Instance network_info: |[{"id": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "address": "fa:16:3e:91:11:b1", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap619d4a4d-a5", "ovs_interfaceid": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1136.416158] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:11:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'619d4a4d-a555-4b6b-a00b-87d1eb721427', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1136.426438] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1136.427323] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1136.433334] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa0f396e-fc14-4f78-9ea5-72d969abc1b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.460766] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096612, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.462554] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1136.462554] env[69927]: value = "task-4096614" [ 1136.462554] env[69927]: _type = "Task" [ 1136.462554] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.473506] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096614, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.681274] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096613, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.724471] env[69927]: DEBUG nova.compute.manager [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Received event network-changed-619d4a4d-a555-4b6b-a00b-87d1eb721427 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1136.724668] env[69927]: DEBUG nova.compute.manager [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Refreshing instance network info cache due to event network-changed-619d4a4d-a555-4b6b-a00b-87d1eb721427. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1136.724884] env[69927]: DEBUG oslo_concurrency.lockutils [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] Acquiring lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.725039] env[69927]: DEBUG oslo_concurrency.lockutils [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] Acquired lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.725201] env[69927]: DEBUG nova.network.neutron [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Refreshing network info cache for port 619d4a4d-a555-4b6b-a00b-87d1eb721427 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1136.876514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-9aa0a285-66e4-4792-bbe9-a62f76666ec6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.877832] env[69927]: DEBUG nova.objects.instance [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lazy-loading 'migration_context' on Instance uuid 9aa0a285-66e4-4792-bbe9-a62f76666ec6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.904031] env[69927]: INFO nova.compute.manager [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Took 21.21 seconds to build instance. [ 1136.959148] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096612, 'name': CreateVM_Task, 'duration_secs': 1.004303} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.959148] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.959148] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.959424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.959666] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.959861] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca635565-3f70-4460-9400-5af75790f54b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.972129] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1136.972129] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525c103a-f6f5-5a00-f705-aa27f0d6e468" [ 1136.972129] env[69927]: _type = "Task" [ 1136.972129] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.979973] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096614, 'name': CreateVM_Task, 'duration_secs': 0.466915} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.980728] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.981369] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.989409] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525c103a-f6f5-5a00-f705-aa27f0d6e468, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.177224] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc89ec2-b9e9-48cf-8680-0d30703777dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.188552] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e6774e-2a5d-4738-9cb5-90c9813efedd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.192407] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096613, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.223237] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b519cfd0-ea0d-4db9-8eab-82c13a32e714 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.234025] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9d7b04-29cc-463b-8c45-1b4f2a4b3198 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.248967] env[69927]: DEBUG nova.compute.provider_tree [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.383820] env[69927]: DEBUG nova.objects.base [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Object Instance<9aa0a285-66e4-4792-bbe9-a62f76666ec6> lazy-loaded attributes: info_cache,migration_context {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1137.384439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e57c43-d12f-4c18-8c04-2aba685a5699 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.408997] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8fb1b0b8-2615-442a-abca-e740b8edfa3f tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.738s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.411279] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcc2c8c3-e689-4f78-b56a-0d2ab0ae2fd5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.417689] env[69927]: DEBUG oslo_vmware.api [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1137.417689] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dd1e0a-02e9-81ff-7785-df1922f49001" [ 1137.417689] env[69927]: _type = "Task" [ 1137.417689] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.429372] env[69927]: DEBUG oslo_vmware.api [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dd1e0a-02e9-81ff-7785-df1922f49001, 'name': SearchDatastore_Task, 'duration_secs': 0.007305} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.429638] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.448656] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.451038] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.451038] env[69927]: DEBUG nova.compute.manager [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Going to confirm migration 5 {{(pid=69927) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1137.487673] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525c103a-f6f5-5a00-f705-aa27f0d6e468, 'name': SearchDatastore_Task, 'duration_secs': 0.020254} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.487673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.487673] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1137.487673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.487673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.487673] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.487673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.488290] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1137.488290] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-144eec93-26f9-4d0f-a8b5-205335e8a3d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.490786] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06a03988-bc6a-4621-81d5-ca694dab34b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.497064] 
env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1137.497064] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52951b6b-e7ef-c361-16e7-f09fc06a1e2b" [ 1137.497064] env[69927]: _type = "Task" [ 1137.497064] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.502177] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.502365] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1137.503582] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08542612-614a-48af-a568-7b76625e7bad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.511162] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52951b6b-e7ef-c361-16e7-f09fc06a1e2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.516263] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1137.516263] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b48ab0-4dd8-d352-1823-5419117f92b4" [ 1137.516263] env[69927]: _type = "Task" [ 1137.516263] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.531451] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b48ab0-4dd8-d352-1823-5419117f92b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.535734] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.535973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.683495] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096613, 'name': CloneVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.752536] env[69927]: DEBUG nova.scheduler.client.report [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.967909] env[69927]: DEBUG nova.network.neutron [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Updated VIF entry in instance network info cache for port 619d4a4d-a555-4b6b-a00b-87d1eb721427. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1137.967909] env[69927]: DEBUG nova.network.neutron [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Updating instance_info_cache with network_info: [{"id": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "address": "fa:16:3e:91:11:b1", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap619d4a4d-a5", "ovs_interfaceid": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.014906] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52951b6b-e7ef-c361-16e7-f09fc06a1e2b, 'name': SearchDatastore_Task, 'duration_secs': 0.020935} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.015217] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.015461] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1138.015675] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.031587] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b48ab0-4dd8-d352-1823-5419117f92b4, 'name': SearchDatastore_Task, 'duration_secs': 0.017382} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.031886] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1030f7ae-a4c5-44e8-9603-fc0e9c2666f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.042527] env[69927]: DEBUG nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1138.046036] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1138.046036] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5267b35d-a18d-c632-5aba-0db2cec50be0" [ 1138.046036] env[69927]: _type = "Task" [ 1138.046036] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.047507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.047507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.047656] env[69927]: DEBUG nova.network.neutron [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1138.047740] env[69927]: DEBUG nova.objects.instance [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'info_cache' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.062269] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5267b35d-a18d-c632-5aba-0db2cec50be0, 'name': SearchDatastore_Task, 'duration_secs': 0.010717} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.063424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.063750] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 1b22fbb0-8628-4c69-b9b4-d6d294c7458b/1b22fbb0-8628-4c69-b9b4-d6d294c7458b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1138.064051] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.064249] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1138.064472] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05e3c275-5a88-47ca-a8cf-25d5918c90dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.068151] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adc8b74b-0bf1-4d1c-950d-da724cbbcb32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.076038] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1138.076038] env[69927]: value = "task-4096615" [ 1138.076038] env[69927]: _type = "Task" [ 1138.076038] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.077307] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1138.077629] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1138.081215] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ada6250-f266-412a-903e-4a4625d1dd20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.090868] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.093270] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1138.093270] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521854ce-119c-5af4-f20f-7feff1c88adb" [ 1138.093270] env[69927]: _type = "Task" [ 1138.093270] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.102342] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521854ce-119c-5af4-f20f-7feff1c88adb, 'name': SearchDatastore_Task, 'duration_secs': 0.010024} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.104026] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-222adc0e-edcf-4752-a7f2-2e49904bb898 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.109139] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1138.109139] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528fdd6e-50ab-a368-1f7f-53ddc4c8e34a" [ 1138.109139] env[69927]: _type = "Task" [ 1138.109139] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.118534] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528fdd6e-50ab-a368-1f7f-53ddc4c8e34a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.180856] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096613, 'name': CloneVM_Task, 'duration_secs': 1.779514} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.181204] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Created linked-clone VM from snapshot [ 1138.182210] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307dc3f6-b75c-412f-a826-9aaeaf9cbe63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.190634] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Uploading image 67c6308c-617a-40e4-b8e7-aa77c737c6ac {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1138.228390] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1138.228390] env[69927]: value = "vm-811560" [ 1138.228390] env[69927]: _type = "VirtualMachine" [ 1138.228390] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1138.228804] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-69d3a9e5-94a9-4915-9b50-30f044eed679 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.239479] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lease: (returnval){ [ 1138.239479] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e470df-6b3a-9a7b-3f3f-0a66224fb993" [ 1138.239479] env[69927]: _type = "HttpNfcLease" [ 1138.239479] env[69927]: } obtained for exporting VM: (result){ [ 1138.239479] env[69927]: value = "vm-811560" [ 1138.239479] env[69927]: _type = "VirtualMachine" [ 1138.239479] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1138.239903] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the lease: (returnval){ [ 1138.239903] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e470df-6b3a-9a7b-3f3f-0a66224fb993" [ 1138.239903] env[69927]: _type = "HttpNfcLease" [ 1138.239903] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1138.248798] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1138.248798] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e470df-6b3a-9a7b-3f3f-0a66224fb993" [ 1138.248798] env[69927]: _type = "HttpNfcLease" [ 1138.248798] env[69927]: } is initializing. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1138.262159] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.262599] env[69927]: DEBUG nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1138.268798] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.074s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.269086] env[69927]: DEBUG nova.objects.instance [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'resources' on Instance uuid 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.472342] env[69927]: DEBUG oslo_concurrency.lockutils [req-c55c3ca6-962c-44e6-8f6e-7682d7f8067d req-80a76b02-8191-4d31-af2f-4d4786cf0f38 service nova] Releasing lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.575886] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.592066] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485144} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.592066] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 1b22fbb0-8628-4c69-b9b4-d6d294c7458b/1b22fbb0-8628-4c69-b9b4-d6d294c7458b.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1138.592066] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1138.592066] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c29c443f-b718-422f-86e6-a77204559f4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.598416] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1138.598416] env[69927]: value = "task-4096617" [ 1138.598416] env[69927]: _type = "Task" [ 1138.598416] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.609731] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096617, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.620505] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528fdd6e-50ab-a368-1f7f-53ddc4c8e34a, 'name': SearchDatastore_Task, 'duration_secs': 0.009369} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.620682] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.620951] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/cff307ed-3c8b-4126-9749-1204597cbf6c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1138.621239] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b862feb-4f9e-4c3c-8495-861577cc2066 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.630933] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1138.630933] env[69927]: value = "task-4096618" [ 1138.630933] env[69927]: _type = "Task" [ 1138.630933] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.640689] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.750273] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1138.750273] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e470df-6b3a-9a7b-3f3f-0a66224fb993" [ 1138.750273] env[69927]: _type = "HttpNfcLease" [ 1138.750273] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1138.750695] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1138.750695] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e470df-6b3a-9a7b-3f3f-0a66224fb993" [ 1138.750695] env[69927]: _type = "HttpNfcLease" [ 1138.750695] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1138.752236] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c52bf8f-87ba-48da-aab2-9694e87567fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.765845] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52091dbf-f9ca-ceb4-c664-d058a54f643a/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1138.766208] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52091dbf-f9ca-ceb4-c664-d058a54f643a/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1138.833284] env[69927]: DEBUG nova.compute.utils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1138.837342] env[69927]: DEBUG nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1138.837679] env[69927]: DEBUG nova.network.neutron [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1138.906948] env[69927]: DEBUG nova.policy [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd89d0a2232b4da1a0b88799062fe8da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3afde63c8cbe4aecb32a470fd6b948f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1139.059545] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1bdb0ca1-f1ba-4fad-a81c-788a95c0f916 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.120644] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108739} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.120644] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.124256] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8839e6cd-1fc2-4901-bafa-1ad1c5e5b213 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.129103] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8e2081-3993-4467-8423-31a5affd82b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.158345] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 1b22fbb0-8628-4c69-b9b4-d6d294c7458b/1b22fbb0-8628-4c69-b9b4-d6d294c7458b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.161659] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d02fe671-06f1-438a-94f2-cc4c4c243233 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.180731] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe6bfb1-15e2-4a71-8f03-45e44d401cd3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.184105] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511057} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.184849] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/cff307ed-3c8b-4126-9749-1204597cbf6c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1139.185102] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1139.185897] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c8f41db-94fa-4fb6-9031-6b5d38f5e217 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.219962] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0555340a-182c-4641-80cc-7690351c0df8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.223203] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1139.223203] env[69927]: value = "task-4096619" [ 1139.223203] env[69927]: _type = "Task" [ 1139.223203] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.223649] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1139.223649] env[69927]: value = "task-4096620" [ 1139.223649] env[69927]: _type = "Task" [ 1139.223649] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.239044] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2aec6f-e2fd-4902-892c-0a33644ea1a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.249495] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096620, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.249838] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096619, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.263488] env[69927]: DEBUG nova.compute.provider_tree [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.342555] env[69927]: DEBUG nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1139.489708] env[69927]: DEBUG nova.network.neutron [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Successfully created port: 4729724d-c9d9-4722-987c-93aac6459382 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.550116] env[69927]: DEBUG nova.network.neutron [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.658388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "7ff17f1d-31fd-440b-906c-2719770a9151" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.658723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "7ff17f1d-31fd-440b-906c-2719770a9151" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.741338] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096620, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099218} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.745579] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.746279] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096619, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.747207] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e203b1fd-8d5d-4deb-9585-9e6abdfedfdf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.777188] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/cff307ed-3c8b-4126-9749-1204597cbf6c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.780133] env[69927]: DEBUG nova.scheduler.client.report [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.783564] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aac2a0c6-13ba-4b0c-9493-59ec3b81c2ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.823869] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1139.823869] env[69927]: value = "task-4096621" [ 1139.823869] env[69927]: _type = "Task" [ 1139.823869] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.840292] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096621, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.053844] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.053844] env[69927]: DEBUG nova.objects.instance [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'migration_context' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.160645] env[69927]: DEBUG nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1140.238668] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096619, 'name': ReconfigVM_Task, 'duration_secs': 0.703359} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.239113] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 1b22fbb0-8628-4c69-b9b4-d6d294c7458b/1b22fbb0-8628-4c69-b9b4-d6d294c7458b.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.239818] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f453149-b2bf-4ce2-8534-b61468e391b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.249145] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1140.249145] env[69927]: value = "task-4096622" [ 1140.249145] env[69927]: _type = "Task" [ 1140.249145] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.259063] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096622, 'name': Rename_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.317486] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.322972] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.708s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.322972] env[69927]: DEBUG nova.objects.instance [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'resources' on Instance uuid b422d5c9-f580-4d07-9d13-af307571bf48 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.336961] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096621, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.353834] env[69927]: DEBUG nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1140.361962] env[69927]: INFO nova.scheduler.client.report [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted allocations for instance 20ac32b7-51fc-40bf-a667-2aeb6c8c7648 [ 1140.404307] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1140.405052] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1140.407664] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1140.409036] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1140.409036] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1140.409036] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1140.409479] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1140.411312] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d 
tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1140.411517] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1140.411695] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1140.413241] env[69927]: DEBUG nova.virt.hardware [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1140.416251] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59caec12-a8f4-4b4e-94c2-60ed9f8a1cf0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.428932] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91106b7b-63cd-4187-bec8-ac89e866024a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.558543] env[69927]: DEBUG nova.objects.base [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Object Instance<4b7934f8-2c97-480b-8af7-f09f6819e2b6> lazy-loaded attributes: info_cache,migration_context {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1140.559592] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b80031-7fba-4157-8cc7-9a4348f3b761 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.582705] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-283fd9e1-bee7-4c18-a817-b48d097eeb74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.589770] env[69927]: DEBUG oslo_vmware.api [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1140.589770] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e224e3-95c9-fe19-0543-9a55cb955d05" [ 1140.589770] env[69927]: _type = "Task" [ 1140.589770] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.601171] env[69927]: DEBUG oslo_vmware.api [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e224e3-95c9-fe19-0543-9a55cb955d05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.692497] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.760194] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096622, 'name': Rename_Task, 'duration_secs': 0.245111} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.760692] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1140.760919] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a7e1539-9071-45f4-ba3f-af84a66f0e3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.768915] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1140.768915] env[69927]: value = "task-4096623" [ 1140.768915] env[69927]: _type = "Task" [ 1140.768915] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.780362] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096623, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.840621] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096621, 'name': ReconfigVM_Task, 'duration_secs': 0.726349} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.841221] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Reconfigured VM instance instance-00000060 to attach disk [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/cff307ed-3c8b-4126-9749-1204597cbf6c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.842039] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c12cdf7-b2b9-4863-9946-b252f4360d2e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.850534] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1140.850534] env[69927]: value = "task-4096624" [ 1140.850534] env[69927]: _type = "Task" [ 1140.850534] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.863022] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096624, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.869686] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5f77e7ec-3f2a-4a85-8484-2af98f558bfb tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "20ac32b7-51fc-40bf-a667-2aeb6c8c7648" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.601s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.109068] env[69927]: DEBUG oslo_vmware.api [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e224e3-95c9-fe19-0543-9a55cb955d05, 'name': SearchDatastore_Task, 'duration_secs': 0.011294} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.112305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.113214] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369b8913-d9a3-493b-958a-d85ce27c4e21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.122079] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9940cb9-aeab-4d22-914b-f2bed28c4e80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.175808] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03cfa3e-d67c-4e1c-aa1d-baa5b0281074 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.187902] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ba02c2-8b89-41b3-b769-4dfc55cc2d82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.204220] env[69927]: DEBUG nova.compute.provider_tree [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.281745] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096623, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.313154] env[69927]: DEBUG nova.compute.manager [req-5ab015cc-a3b3-4349-83b7-09ee00fce57b req-75f90a72-0c09-43d6-8932-c46012f71c92 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Received event network-vif-plugged-4729724d-c9d9-4722-987c-93aac6459382 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.313405] env[69927]: DEBUG oslo_concurrency.lockutils [req-5ab015cc-a3b3-4349-83b7-09ee00fce57b req-75f90a72-0c09-43d6-8932-c46012f71c92 service nova] Acquiring lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.313675] env[69927]: DEBUG oslo_concurrency.lockutils [req-5ab015cc-a3b3-4349-83b7-09ee00fce57b req-75f90a72-0c09-43d6-8932-c46012f71c92 service nova] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.313883] env[69927]: DEBUG oslo_concurrency.lockutils [req-5ab015cc-a3b3-4349-83b7-09ee00fce57b req-75f90a72-0c09-43d6-8932-c46012f71c92 service nova] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.314118] env[69927]: DEBUG nova.compute.manager [req-5ab015cc-a3b3-4349-83b7-09ee00fce57b req-75f90a72-0c09-43d6-8932-c46012f71c92 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] No waiting events found dispatching network-vif-plugged-4729724d-c9d9-4722-987c-93aac6459382 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1141.314373] env[69927]: WARNING nova.compute.manager [req-5ab015cc-a3b3-4349-83b7-09ee00fce57b req-75f90a72-0c09-43d6-8932-c46012f71c92 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Received unexpected event network-vif-plugged-4729724d-c9d9-4722-987c-93aac6459382 for instance with vm_state building and task_state spawning. [ 1141.371587] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096624, 'name': Rename_Task, 'duration_secs': 0.212118} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.372306] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1141.372791] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4672db19-1be0-47b2-8e1f-173c6a976d6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.383679] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1141.383679] env[69927]: value = "task-4096625" [ 1141.383679] env[69927]: _type = "Task" [ 1141.383679] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.396874] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096625, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.552745] env[69927]: DEBUG nova.network.neutron [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Successfully updated port: 4729724d-c9d9-4722-987c-93aac6459382 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1141.708447] env[69927]: DEBUG nova.scheduler.client.report [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1141.786125] env[69927]: DEBUG oslo_vmware.api [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096623, 'name': PowerOnVM_Task, 'duration_secs': 0.805898} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.786482] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.786706] env[69927]: INFO nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Took 10.16 seconds to spawn the instance on the hypervisor. [ 1141.786901] env[69927]: DEBUG nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1141.787855] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f481c21-e66a-476b-8a3a-c2a9a2923b5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.894545] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096625, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.057788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.057788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.057788] env[69927]: DEBUG nova.network.neutron [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.216084] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.217612] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.787s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.250780] env[69927]: INFO nova.scheduler.client.report [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted allocations for instance b422d5c9-f580-4d07-9d13-af307571bf48 [ 1142.320786] env[69927]: INFO nova.compute.manager [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Took 25.60 seconds to build instance. [ 1142.401164] env[69927]: DEBUG oslo_vmware.api [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096625, 'name': PowerOnVM_Task, 'duration_secs': 0.638068} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.401164] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1142.401164] env[69927]: INFO nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1142.401164] env[69927]: DEBUG nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1142.401164] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89de085-9eb5-4b62-9e12-11a93eb16c68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.607431] env[69927]: DEBUG nova.network.neutron [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1142.759269] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a380131e-ab0c-4d17-877c-f1dd96eb681a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b422d5c9-f580-4d07-9d13-af307571bf48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.857s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.824168] env[69927]: DEBUG oslo_concurrency.lockutils [None req-39bc91ae-2ae0-4760-83ae-184d5c9a8190 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.126s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.858554] env[69927]: DEBUG nova.network.neutron [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Updating instance_info_cache with network_info: [{"id": "4729724d-c9d9-4722-987c-93aac6459382", "address": "fa:16:3e:2d:6a:78", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4729724d-c9", "ovs_interfaceid": "4729724d-c9d9-4722-987c-93aac6459382", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.926063] env[69927]: INFO nova.compute.manager [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Took 26.13 seconds to build instance. 
[ 1142.988424] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f454b4f7-94a5-4ab4-9251-5450a4e95d3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.000597] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcc8948-0626-4dd3-a533-5d2d59a40f46 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.035085] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295e5195-004c-4df0-90b4-377e5d82fe4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.048021] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6b67a3-244f-4417-bd2f-3541ffeb8da7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.066504] env[69927]: DEBUG nova.compute.provider_tree [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.362015] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.362534] env[69927]: DEBUG nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Instance network_info: |[{"id": "4729724d-c9d9-4722-987c-93aac6459382", "address": "fa:16:3e:2d:6a:78", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4729724d-c9", "ovs_interfaceid": "4729724d-c9d9-4722-987c-93aac6459382", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1143.363177] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 
tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:6a:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4729724d-c9d9-4722-987c-93aac6459382', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1143.376033] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.376461] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1143.376961] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-776c6fd7-a8a5-47ce-8e77-da849b8bc114 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.408317] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1143.408317] env[69927]: value = "task-4096626" [ 1143.408317] env[69927]: _type = "Task" [ 1143.408317] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.418778] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096626, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.432574] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6e5384f3-0193-4109-a713-fcd60049f965 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.649s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.479489] env[69927]: DEBUG nova.compute.manager [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Received event network-changed-4729724d-c9d9-4722-987c-93aac6459382 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.479622] env[69927]: DEBUG nova.compute.manager [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Refreshing instance network info cache due to event network-changed-4729724d-c9d9-4722-987c-93aac6459382. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1143.479849] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] Acquiring lock "refresh_cache-2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.480357] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] Acquired lock "refresh_cache-2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.480737] env[69927]: DEBUG nova.network.neutron [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Refreshing network info cache for port 4729724d-c9d9-4722-987c-93aac6459382 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1143.569945] env[69927]: DEBUG nova.scheduler.client.report [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1143.754448] env[69927]: INFO nova.compute.manager [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Rescuing [ 1143.754767] env[69927]: DEBUG oslo_concurrency.lockutils [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.754940] env[69927]: DEBUG oslo_concurrency.lockutils [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.755186] env[69927]: DEBUG nova.network.neutron [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1143.924151] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096626, 'name': CreateVM_Task, 'duration_secs': 0.450327} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.924419] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1143.925508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.925508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.925934] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1143.927561] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eebd2ee-a2bd-4254-9543-46cca90b1224 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.939179] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1143.939179] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba6d96-f283-9c17-b1c1-324a944a5695" [ 1143.939179] env[69927]: _type = "Task" [ 1143.939179] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.955020] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba6d96-f283-9c17-b1c1-324a944a5695, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.257924] env[69927]: DEBUG nova.network.neutron [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Updated VIF entry in instance network info cache for port 4729724d-c9d9-4722-987c-93aac6459382. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1144.258982] env[69927]: DEBUG nova.network.neutron [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Updating instance_info_cache with network_info: [{"id": "4729724d-c9d9-4722-987c-93aac6459382", "address": "fa:16:3e:2d:6a:78", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4729724d-c9", "ovs_interfaceid": "4729724d-c9d9-4722-987c-93aac6459382", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.452112] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ba6d96-f283-9c17-b1c1-324a944a5695, 'name': SearchDatastore_Task, 'duration_secs': 0.017589} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.452560] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.452843] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1144.453155] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.453314] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.453505] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1144.453793] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c042bade-c091-4029-8389-beb6ca8f44fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.475282] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1144.475416] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1144.476267] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfe2e969-7387-463b-8f16-28c671e56c66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.483352] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1144.483352] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e89c21-0a69-c8c1-3b93-03db15dc29de" [ 1144.483352] env[69927]: _type = "Task" [ 1144.483352] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.502376] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e89c21-0a69-c8c1-3b93-03db15dc29de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.550534] env[69927]: DEBUG nova.network.neutron [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Updating instance_info_cache with network_info: [{"id": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "address": "fa:16:3e:91:11:b1", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap619d4a4d-a5", "ovs_interfaceid": "619d4a4d-a555-4b6b-a00b-87d1eb721427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.589341] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.373s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.593612] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 
tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.017s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.595282] env[69927]: INFO nova.compute.claims [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.705010] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.705198] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.764400] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] Releasing lock "refresh_cache-2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.764670] env[69927]: DEBUG nova.compute.manager [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Received event network-changed-9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.764843] env[69927]: DEBUG nova.compute.manager [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Refreshing instance network info cache due to event network-changed-9251f381-f500-4fb3-a407-0020e12af1ec. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1144.765074] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] Acquiring lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.765220] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] Acquired lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.765383] env[69927]: DEBUG nova.network.neutron [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Refreshing network info cache for port 9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.896952] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.897220] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.995536] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e89c21-0a69-c8c1-3b93-03db15dc29de, 'name': SearchDatastore_Task, 'duration_secs': 0.015993} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.996660] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a26716a-76e2-40f4-9df3-83a96b2db4ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.003234] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1145.003234] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e5ca0-3b88-bd80-c25f-6c8159ca6fb3" [ 1145.003234] env[69927]: _type = "Task" [ 1145.003234] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.013197] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e5ca0-3b88-bd80-c25f-6c8159ca6fb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.057462] env[69927]: DEBUG oslo_concurrency.lockutils [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-cff307ed-3c8b-4126-9749-1204597cbf6c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.176061] env[69927]: INFO nova.scheduler.client.report [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted allocation for migration c94ab390-d71f-484a-9d29-c3ecdb47d6c8 [ 1145.207773] env[69927]: DEBUG nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1145.401583] env[69927]: DEBUG nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1145.515369] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e5ca0-3b88-bd80-c25f-6c8159ca6fb3, 'name': SearchDatastore_Task, 'duration_secs': 0.012078} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.515632] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.515734] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1/2c0c2704-1ccb-4e1f-95e9-62e44b751cc1.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1145.516044] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b68884e-32d7-40e2-8e55-4220b24109e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.524744] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1145.524744] env[69927]: value = "task-4096627" [ 1145.524744] env[69927]: _type = "Task" [ 1145.524744] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.535307] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.536029] env[69927]: DEBUG nova.network.neutron [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Updated VIF entry in instance network info cache for port 9251f381-f500-4fb3-a407-0020e12af1ec. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.536386] env[69927]: DEBUG nova.network.neutron [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Updating instance_info_cache with network_info: [{"id": "9251f381-f500-4fb3-a407-0020e12af1ec", "address": "fa:16:3e:ab:22:93", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9251f381-f5", "ovs_interfaceid": "9251f381-f500-4fb3-a407-0020e12af1ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.688864] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3e80cf04-84ba-4c5a-bfd3-b2bf59989a18 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.304s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.735231] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.883447] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1582c710-e99a-4a16-84f4-dc63964ee19c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.894032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2d4b88-0be3-4e19-84fa-5e143584bb93 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.940536] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cfcdf6-7159-4951-abc5-76d38273e67e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.950132] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a81a44-7a9e-47e0-9c48-83d01d4b6d8f 
{{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.965188] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.977808] env[69927]: DEBUG nova.compute.provider_tree [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.039209] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b087872-1b5d-45d3-bb30-5944beb26de4 req-d92d7686-0f61-40dd-a66e-a4c003e89f54 service nova] Releasing lock "refresh_cache-1b22fbb0-8628-4c69-b9b4-d6d294c7458b" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.039958] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096627, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.382104] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.382321] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.483064] env[69927]: DEBUG nova.scheduler.client.report [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.540310] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096627, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.683483} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.540600] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1/2c0c2704-1ccb-4e1f-95e9-62e44b751cc1.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1146.540835] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1146.541102] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c99f8639-423b-4c09-8bf3-8d3b54e17824 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.549334] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1146.549334] env[69927]: value = "task-4096628" [ 1146.549334] env[69927]: _type = "Task" [ 1146.549334] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.558995] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096628, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.601092] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.601092] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e0fc82f-95ad-4e07-8084-9ba7d438b733 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.612038] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1146.612038] env[69927]: value = "task-4096629" [ 1146.612038] env[69927]: _type = "Task" [ 1146.612038] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.621946] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096629, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.695861] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.696529] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.696529] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.696869] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.696869] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.699670] env[69927]: INFO nova.compute.manager [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Terminating instance [ 1146.885264] env[69927]: DEBUG nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1146.989448] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.989448] env[69927]: DEBUG nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1146.992063] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.300s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.994145] env[69927]: INFO nova.compute.claims [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1147.064819] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096628, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141741} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.064819] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1147.064819] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec22586-817a-45d8-8f2c-1eb49716319a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.090271] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1/2c0c2704-1ccb-4e1f-95e9-62e44b751cc1.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1147.090590] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a55887f-a57b-42ff-b08f-943b3d0b24a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.111461] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1147.111461] env[69927]: value = "task-4096630" [ 1147.111461] env[69927]: _type = "Task" [ 1147.111461] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.126861] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096629, 'name': PowerOffVM_Task, 'duration_secs': 0.367989} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.133535] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1147.133535] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096630, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.133535] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2833f597-80ee-4260-a62c-74a30d13ad34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.156550] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd908dbc-5ee1-41dd-a387-f7f7f5f63f9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.204511] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1147.204511] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7330792-b406-4c64-8e17-218c371f6e3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.207714] env[69927]: DEBUG nova.compute.manager [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1147.207714] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1147.208517] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8ccf17-fb71-4f0e-bd05-bb8e306f3e29 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.219691] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1147.220744] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bd5b9c6-3a27-431f-9189-050971c03921 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.223056] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1147.223056] env[69927]: value = "task-4096631" [ 1147.223056] env[69927]: _type = "Task" [ 1147.223056] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.228026] env[69927]: DEBUG oslo_vmware.api [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1147.228026] env[69927]: value = "task-4096632" [ 1147.228026] env[69927]: _type = "Task" [ 1147.228026] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.237096] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1147.237319] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1147.237564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.237709] env[69927]: DEBUG oslo_concurrency.lockutils [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.237882] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1147.244484] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a578778-4bed-4ca1-ac70-ff6522a208b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.247082] env[69927]: DEBUG oslo_vmware.api [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096632, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.262388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "6828dc80-2e0e-4715-a620-42edbe5eec2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.262388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.262388] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1147.262388] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1147.262983] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73781ab3-c98b-4fcf-80b6-478551038d78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.269628] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1147.269628] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526278de-69ba-c2fd-2361-47d841b75ac7" [ 1147.269628] env[69927]: _type = "Task" [ 1147.269628] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.275388] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52091dbf-f9ca-ceb4-c664-d058a54f643a/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1147.276559] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b592c0c-07b2-4f0f-86fe-8a2076a6ce35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.287962] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526278de-69ba-c2fd-2361-47d841b75ac7, 'name': SearchDatastore_Task, 'duration_secs': 0.011966} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.290524] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52091dbf-f9ca-ceb4-c664-d058a54f643a/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1147.290711] env[69927]: ERROR oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52091dbf-f9ca-ceb4-c664-d058a54f643a/disk-0.vmdk due to incomplete transfer. [ 1147.290967] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-388aa244-bf51-42e5-9274-c78a393bff5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.293406] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-853bde9b-8bd3-43ac-a8cc-42f291248a6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.300186] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1147.300186] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52191009-8f5e-691d-ba37-15a81650fb05" [ 1147.300186] env[69927]: _type = "Task" [ 1147.300186] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.305588] env[69927]: DEBUG oslo_vmware.rw_handles [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52091dbf-f9ca-ceb4-c664-d058a54f643a/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1147.305805] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Uploaded image 67c6308c-617a-40e4-b8e7-aa77c737c6ac to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1147.308404] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1147.309100] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6ce30676-86c2-4dc5-aa07-9c121ed73bee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.314813] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52191009-8f5e-691d-ba37-15a81650fb05, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.315641] env[69927]: DEBUG oslo_concurrency.lockutils [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.315996] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. {{(pid=69927) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1147.316331] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6e33b06-cc31-43ed-abe1-070bed041e26 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.322126] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1147.322126] env[69927]: value = "task-4096633" [ 1147.322126] env[69927]: _type = "Task" [ 1147.322126] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.327578] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1147.327578] env[69927]: value = "task-4096634" [ 1147.327578] env[69927]: _type = "Task" [ 1147.327578] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.335852] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096633, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.343710] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.413971] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.500762] env[69927]: DEBUG nova.compute.utils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1147.506677] env[69927]: DEBUG nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1147.506677] env[69927]: DEBUG nova.network.neutron [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1147.549726] env[69927]: DEBUG nova.policy [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '948bb5c1ffc54cecb2d4ae1e5c98c11c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b7ae5270b0643e6b5720d4f2f765d74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1147.629381] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096630, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.739415] env[69927]: DEBUG oslo_vmware.api [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096632, 'name': PowerOffVM_Task, 'duration_secs': 0.287174} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.739735] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1147.739918] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1147.740196] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8051120-6e2c-4d8d-be66-4ce15ed55709 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.763814] env[69927]: DEBUG nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1147.821754] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1147.822053] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1147.822312] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleting the datastore file [datastore2] 9aa0a285-66e4-4792-bbe9-a62f76666ec6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1147.823180] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69313cb1-7779-47cf-9701-25c067a20013 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.840716] env[69927]: DEBUG oslo_vmware.api [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1147.840716] env[69927]: value = "task-4096636" [ 1147.840716] env[69927]: _type = "Task" [ 1147.840716] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.848264] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096633, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.852466] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096634, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.859611] env[69927]: DEBUG oslo_vmware.api [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.899903] env[69927]: DEBUG nova.network.neutron [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Successfully created port: d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1148.008819] env[69927]: DEBUG nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1148.127503] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096630, 'name': ReconfigVM_Task, 'duration_secs': 0.549173} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.127798] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1/2c0c2704-1ccb-4e1f-95e9-62e44b751cc1.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1148.128535] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f6592da-e9e9-4442-a02b-e7a842d0fdfb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.137087] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1148.137087] env[69927]: value = "task-4096637" [ 1148.137087] env[69927]: _type = "Task" [ 1148.137087] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.151365] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096637, 'name': Rename_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.283355] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.287736] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28f4396-c118-4def-b921-062c323fd428 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.295921] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bc96cb-178d-49ce-bec8-a08506b23cac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.331097] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8368b7-63ee-4b15-9991-bda34c26d76a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.340318] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096633, 'name': Destroy_Task, 'duration_secs': 0.65623} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.343859] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Destroyed the VM [ 1148.344140] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1148.344523] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-729e6ed8-27d3-441c-a820-2b6374290762 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.351313] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03da660d-f468-4198-804d-2aa67fdadf68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.358940] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553034} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.360348] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. [ 1148.361626] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de49c9d-ca2a-45f8-877f-7ebccf483dc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.376457] env[69927]: DEBUG oslo_vmware.api [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203673} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.376776] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1148.376776] env[69927]: value = "task-4096638" [ 1148.376776] env[69927]: _type = "Task" [ 1148.376776] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.377346] env[69927]: DEBUG nova.compute.provider_tree [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.379270] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1148.379676] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1148.379676] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1148.379797] env[69927]: INFO nova.compute.manager [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 1148.382210] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1148.382210] env[69927]: DEBUG nova.compute.manager [-] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1148.382210] env[69927]: DEBUG nova.network.neutron [-] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1148.405628] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.407463] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98f82830-53d4-4401-9218-6491c2add475 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.425402] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096638, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.431044] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1148.431044] env[69927]: value = "task-4096639" [ 1148.431044] env[69927]: _type = "Task" [ 1148.431044] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.441068] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096639, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.647192] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096637, 'name': Rename_Task, 'duration_secs': 0.215239} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.647538] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1148.647756] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5bc7335-fa89-4775-b6b8-50ff06b78072 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.656247] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1148.656247] env[69927]: value = "task-4096640" [ 1148.656247] env[69927]: _type = "Task" [ 1148.656247] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.664798] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.745065] env[69927]: DEBUG nova.compute.manager [req-557cd94b-0b64-4a03-ab5e-6f8dee15febe req-af96014e-4027-4d59-ba16-53e4e14a5d2b service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Received event network-vif-deleted-216a398c-956e-4115-ae6d-b045d946831b {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.745315] env[69927]: INFO nova.compute.manager [req-557cd94b-0b64-4a03-ab5e-6f8dee15febe req-af96014e-4027-4d59-ba16-53e4e14a5d2b service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Neutron deleted interface 216a398c-956e-4115-ae6d-b045d946831b; detaching it from the instance and deleting it from the info cache [ 1148.745498] env[69927]: DEBUG nova.network.neutron [req-557cd94b-0b64-4a03-ab5e-6f8dee15febe req-af96014e-4027-4d59-ba16-53e4e14a5d2b service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.882115] env[69927]: DEBUG nova.scheduler.client.report [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.894878] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] 
Task: {'id': task-4096638, 'name': RemoveSnapshot_Task, 'duration_secs': 0.437427} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.895176] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1148.895451] env[69927]: DEBUG nova.compute.manager [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1148.896315] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e4304c-015d-4d35-b5d0-e9c07fe7a39f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.942714] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096639, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.021607] env[69927]: DEBUG nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1149.049332] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.049610] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.049771] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.049952] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.050113] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.050264] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.050539] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.050726] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 1149.050901] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.051078] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.051255] env[69927]: DEBUG nova.virt.hardware [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.052144] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a548870f-6845-4714-bc55-02330d519ed6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.060842] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b40d38-e220-4686-b7d5-d6b4e3767100 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.167272] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096640, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.223392] env[69927]: DEBUG nova.network.neutron [-] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.249614] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cbd99bb7-69df-4929-8dbc-61510576128e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.260143] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423390b7-3cca-4e5d-9215-4ebfe9768036 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.296250] env[69927]: DEBUG nova.compute.manager [req-557cd94b-0b64-4a03-ab5e-6f8dee15febe req-af96014e-4027-4d59-ba16-53e4e14a5d2b service nova] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Detach interface failed, port_id=216a398c-956e-4115-ae6d-b045d946831b, reason: Instance 9aa0a285-66e4-4792-bbe9-a62f76666ec6 could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1149.334616] env[69927]: DEBUG nova.compute.manager [req-57a89726-ba9b-479b-a1f6-8c8a2b934625 req-dba4f2ea-202c-425b-ba62-01ac7ed283d6 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-vif-plugged-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.334848] env[69927]: DEBUG oslo_concurrency.lockutils [req-57a89726-ba9b-479b-a1f6-8c8a2b934625 req-dba4f2ea-202c-425b-ba62-01ac7ed283d6 service nova] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.335070] env[69927]: DEBUG oslo_concurrency.lockutils [req-57a89726-ba9b-479b-a1f6-8c8a2b934625 req-dba4f2ea-202c-425b-ba62-01ac7ed283d6 service nova] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.335240] env[69927]: DEBUG oslo_concurrency.lockutils [req-57a89726-ba9b-479b-a1f6-8c8a2b934625 req-dba4f2ea-202c-425b-ba62-01ac7ed283d6 service nova] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.335404] env[69927]: DEBUG nova.compute.manager [req-57a89726-ba9b-479b-a1f6-8c8a2b934625 req-dba4f2ea-202c-425b-ba62-01ac7ed283d6 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] No waiting events found dispatching network-vif-plugged-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1149.335564] env[69927]: WARNING nova.compute.manager [req-57a89726-ba9b-479b-a1f6-8c8a2b934625 req-dba4f2ea-202c-425b-ba62-01ac7ed283d6 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received unexpected event network-vif-plugged-d201dadc-ab89-4ede-8c29-41217e3af341 for instance with vm_state building and task_state spawning. [ 1149.390034] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.390687] env[69927]: DEBUG nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1149.393529] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.281s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.410380] env[69927]: INFO nova.compute.manager [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Shelve offloading [ 1149.440840] env[69927]: DEBUG nova.network.neutron [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Successfully updated port: d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1149.447145] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096639, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.668592] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096640, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.726293] env[69927]: INFO nova.compute.manager [-] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Took 1.35 seconds to deallocate network for instance. [ 1149.902410] env[69927]: DEBUG nova.compute.utils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1149.904063] env[69927]: DEBUG nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1149.904301] env[69927]: DEBUG nova.network.neutron [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1149.914761] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1149.915766] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-954d8997-746b-40fe-ba1f-82fa5bfeaea3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.924228] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1149.924228] env[69927]: value = "task-4096641" [ 1149.924228] env[69927]: _type = "Task" [ 1149.924228] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.942989] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1149.943262] env[69927]: DEBUG nova.compute.manager [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1149.944327] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6f3543-5cc2-4d84-b739-dee9cb1f228f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.950882] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.951041] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.951193] env[69927]: DEBUG nova.network.neutron [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 
693a6c6b-8d1c-405e-bb17-73259e28f556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.953120] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096639, 'name': ReconfigVM_Task, 'duration_secs': 1.193037} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.954842] env[69927]: DEBUG nova.policy [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bab156b8df164b21944b7b0681a19571', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '999896abcbbd4ceea4fc2d898e025bc3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1149.959301] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Reconfigured VM instance instance-00000060 to attach disk [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.961353] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb4e74d-4b48-4c8c-aa3d-a40b33d9fc1a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.965763] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.965925] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.966114] env[69927]: DEBUG nova.network.neutron [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.997363] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-642e9149-2852-4a27-a64a-64a4cb1527d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1150.015920] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1150.015920] env[69927]: value = "task-4096642" [ 1150.015920] env[69927]: _type = "Task" [ 1150.015920] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.031542] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096642, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.169548] env[69927]: DEBUG oslo_vmware.api [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096640, 'name': PowerOnVM_Task, 'duration_secs': 1.124704} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.175701] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.175701] env[69927]: INFO nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Took 9.82 seconds to spawn the instance on the hypervisor. 
[ 1150.175701] env[69927]: DEBUG nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1150.175701] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98192b3c-4981-4c59-bcad-8460d1ee3f51 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.204019] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef96761-16a0-485b-abc7-3b0ecf634bd0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.212873] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e1e9ca-9d81-4d4d-bf6a-10788976fa59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.250317] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.252431] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf8e787-53fe-40a0-ba44-02ca07f879bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.261703] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadfa1f6-7735-4814-87d5-fadaa560130c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.277276] env[69927]: DEBUG nova.compute.provider_tree [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.283363] env[69927]: DEBUG nova.network.neutron [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Successfully created port: c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1150.407506] env[69927]: DEBUG nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1150.497310] env[69927]: DEBUG nova.network.neutron [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.528428] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096642, 'name': ReconfigVM_Task, 'duration_secs': 0.31623} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.531322] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1150.531956] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-470ddbb2-a471-4dfd-8ccf-785a15fb481c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.539522] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1150.539522] env[69927]: value = "task-4096643" [ 1150.539522] env[69927]: _type = "Task" [ 1150.539522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.549726] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096643, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.693138] env[69927]: INFO nova.compute.manager [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Took 24.49 seconds to build instance. 
[ 1150.770239] env[69927]: DEBUG nova.network.neutron [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2105ba-02", "ovs_interfaceid": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.781466] env[69927]: DEBUG nova.scheduler.client.report [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.789953] env[69927]: DEBUG nova.network.neutron [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapd201dadc-ab", "ovs_interfaceid": "d201dadc-ab89-4ede-8c29-41217e3af341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.051942] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096643, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.199957] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a6f206f4-aa50-4296-8ff1-5f42569b9d2d tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.011s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.273430] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.292297] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.292590] env[69927]: DEBUG nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Instance network_info: |[{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd201dadc-ab", "ovs_interfaceid": "d201dadc-ab89-4ede-8c29-41217e3af341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1151.293017] 
env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:c9:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '510d3c47-3615-43d5-aa5d-a279fd915e71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd201dadc-ab89-4ede-8c29-41217e3af341', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1151.300563] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1151.300808] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1151.301048] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1842077d-6a1a-444c-8bb8-a088e92bcc1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.324889] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1151.324889] env[69927]: value = "task-4096644" [ 1151.324889] env[69927]: _type = "Task" [ 1151.324889] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.326112] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc563889-3067-4389-b43c-e6be98160c01 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.336814] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ccf9a4d-0b39-492c-ae4a-0767bef0257a tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Suspending the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1151.340686] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-4d51721f-94ac-40f2-8fab-0902aa831e4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.342673] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096644, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.349329] env[69927]: DEBUG oslo_vmware.api [None req-7ccf9a4d-0b39-492c-ae4a-0767bef0257a tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1151.349329] env[69927]: value = "task-4096645" [ 1151.349329] env[69927]: _type = "Task" [ 1151.349329] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.361521] env[69927]: DEBUG oslo_vmware.api [None req-7ccf9a4d-0b39-492c-ae4a-0767bef0257a tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096645, 'name': SuspendVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.364412] env[69927]: DEBUG nova.compute.manager [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.364636] env[69927]: DEBUG nova.compute.manager [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing instance network info cache due to event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1151.364817] env[69927]: DEBUG oslo_concurrency.lockutils [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] Acquiring lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.364957] env[69927]: DEBUG oslo_concurrency.lockutils [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] Acquired lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.365149] env[69927]: DEBUG nova.network.neutron [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.424136] env[69927]: DEBUG nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1151.470035] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1151.470035] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.470344] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1151.470344] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.470451] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1151.470592] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1151.470824] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1151.474027] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1151.474027] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1151.474027] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1151.474027] env[69927]: DEBUG nova.virt.hardware [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1151.474027] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7010dc-ec2d-4a89-b5a0-cbf969d7900b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.481404] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d8f7af-1045-4cc1-8ba7-ac3f8e5ec71a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.550984] env[69927]: DEBUG oslo_vmware.api [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096643, 'name': PowerOnVM_Task, 'duration_secs': 0.565842} completed successfully. 
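The nova.virt.hardware entries above walk from the flavor/image limits (65536 sockets, cores and threads) down to a single possible topology for the 1-vCPU m1.nano flavor. The sketch below shows that kind of enumeration under the simplifying assumption that a topology is any (sockets, cores, threads) factorisation of the vCPU count within the limits; it is illustrative only and not Nova's actual selection logic, which also weighs preferences and ordering.

```python
def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) split whose product equals
    # the vCPU count and fits the given limits.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# possible_cpu_topologies(1) -> [(1, 1, 1)], matching "Got 1 possible topologies" above.
```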
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.551280] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1151.554322] env[69927]: DEBUG nova.compute.manager [None req-75dff92f-89bf-4c89-8a0e-1baa96b7014e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1151.555170] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d23ade-4bbf-44d3-8d19-268ea25be76e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.794812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.401s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.795216] env[69927]: DEBUG nova.compute.manager [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=69927) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1151.800288] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.065s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.802617] env[69927]: INFO nova.compute.claims [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1151.836556] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096644, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.863025] env[69927]: DEBUG oslo_vmware.api [None req-7ccf9a4d-0b39-492c-ae4a-0767bef0257a tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096645, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.994372] env[69927]: DEBUG nova.network.neutron [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Successfully updated port: c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1152.092061] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1152.093339] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb4bbe1-8f42-4ef0-83dd-087a52302562 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.105407] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1152.105820] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08045419-15d7-456f-b5f4-bd1bdc7b0c0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.139637] env[69927]: DEBUG nova.network.neutron [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updated VIF entry in instance network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.139637] env[69927]: DEBUG nova.network.neutron [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd201dadc-ab", "ovs_interfaceid": "d201dadc-ab89-4ede-8c29-41217e3af341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.175450] env[69927]: DEBUG nova.compute.manager [req-15e2d79b-6b71-4ac4-b847-7358645dc23d req-239f48bf-1256-4d84-9826-90c6c6e9b91f service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received event network-vif-unplugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1152.175450] env[69927]: DEBUG oslo_concurrency.lockutils [req-15e2d79b-6b71-4ac4-b847-7358645dc23d req-239f48bf-1256-4d84-9826-90c6c6e9b91f service nova] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.175450] env[69927]: DEBUG oslo_concurrency.lockutils [req-15e2d79b-6b71-4ac4-b847-7358645dc23d req-239f48bf-1256-4d84-9826-90c6c6e9b91f service nova] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.176296] env[69927]: DEBUG oslo_concurrency.lockutils [req-15e2d79b-6b71-4ac4-b847-7358645dc23d req-239f48bf-1256-4d84-9826-90c6c6e9b91f service nova] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.176737] env[69927]: DEBUG nova.compute.manager [req-15e2d79b-6b71-4ac4-b847-7358645dc23d req-239f48bf-1256-4d84-9826-90c6c6e9b91f service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] No waiting events found dispatching network-vif-unplugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:321}} [ 1152.177221] env[69927]: WARNING nova.compute.manager [req-15e2d79b-6b71-4ac4-b847-7358645dc23d req-239f48bf-1256-4d84-9826-90c6c6e9b91f service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received unexpected event network-vif-unplugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd for instance with vm_state shelved and task_state shelving_offloading. [ 1152.187382] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1152.187382] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1152.187382] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleting the datastore file [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.187382] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d3b0700-9dd2-4f7f-8a40-532c3d9f3fd5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.197802] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1152.197802] env[69927]: value = "task-4096647" [ 1152.197802] env[69927]: _type = "Task" [ 1152.197802] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.209659] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.339285] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096644, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.362251] env[69927]: DEBUG oslo_vmware.api [None req-7ccf9a4d-0b39-492c-ae4a-0767bef0257a tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096645, 'name': SuspendVM_Task, 'duration_secs': 0.757535} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.362251] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7ccf9a4d-0b39-492c-ae4a-0767bef0257a tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Suspended the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1152.363171] env[69927]: DEBUG nova.compute.manager [None req-7ccf9a4d-0b39-492c-ae4a-0767bef0257a tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1152.363840] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fcc21d-38bb-4849-841b-7de62ae0e882 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.376295] env[69927]: INFO nova.scheduler.client.report [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted allocation for migration c390783a-4fa8-4820-8f62-f91ac3968718 [ 1152.497368] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.497539] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquired lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.497708] env[69927]: DEBUG nova.network.neutron [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.641643] env[69927]: DEBUG oslo_concurrency.lockutils [req-c24f0529-fbe8-4df6-aa65-352a27f747aa req-30b5626a-e9fc-46b4-82a4-20424e6afa1b service nova] Releasing lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.708400] env[69927]: DEBUG oslo_vmware.api [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154245} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.708722] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.708911] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.709101] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.732191] env[69927]: INFO nova.scheduler.client.report [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleted allocations for instance cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a [ 1152.837543] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096644, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.884592] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8432a4b9-e70a-4c8d-9bdf-c735a1cee27d tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 15.436s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.018546] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8218b529-6313-4b39-83b3-a5551c4de7eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.026079] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cf896e-2682-4c28-bedb-1ac466f86896 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.030160] env[69927]: DEBUG nova.network.neutron [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1153.064798] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f79e2a-6e8c-40b6-83d9-39cece97b567 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.074514] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba08473b-f29a-4bb5-8da7-0d743b571ad5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.090895] env[69927]: DEBUG nova.compute.provider_tree [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.190510] env[69927]: DEBUG nova.network.neutron [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.237023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.337847] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096644, 'name': CreateVM_Task, 'duration_secs': 1.672977} completed successfully. 
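The instance_info_cache updates above carry the full network_info blob per port: the port id, MAC address, subnets with fixed IPs, and the NSX logical-switch/segmentation details. The following is a small illustrative parser for a blob of that shape, assuming it is handed around as a JSON string; it is a sketch only, not Nova's network cache code.

```python
import json

def summarize_network_info(blob: str):
    # Pull out the fields Nova logs for each VIF in the cache-update entries above.
    vifs = json.loads(blob)
    summary = []
    for vif in vifs:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        summary.append({
            "port_id": vif["id"],
            "mac": vif["address"],
            "fixed_ips": fixed_ips,
            "segmentation_id": vif["details"].get("segmentation_id"),
        })
    return summary
```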
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.338073] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1153.339121] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.339354] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.339709] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1153.340184] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ffbd6a5-25a4-4a07-a24d-59e9d4b66550 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.344939] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1153.344939] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528fb8a8-90c5-ab38-3aa4-b0e56e640309" [ 1153.344939] env[69927]: _type = "Task" [ 1153.344939] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.354126] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528fb8a8-90c5-ab38-3aa4-b0e56e640309, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.390711] env[69927]: DEBUG nova.objects.instance [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'flavor' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.398827] env[69927]: DEBUG nova.compute.manager [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Received event network-vif-plugged-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.398827] env[69927]: DEBUG oslo_concurrency.lockutils [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] Acquiring lock "7ff17f1d-31fd-440b-906c-2719770a9151-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.398827] env[69927]: DEBUG oslo_concurrency.lockutils [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] Lock "7ff17f1d-31fd-440b-906c-2719770a9151-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.398827] env[69927]: DEBUG oslo_concurrency.lockutils [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] Lock "7ff17f1d-31fd-440b-906c-2719770a9151-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.398827] env[69927]: DEBUG nova.compute.manager [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] No waiting events found dispatching network-vif-plugged-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1153.399308] env[69927]: WARNING nova.compute.manager [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Received unexpected event network-vif-plugged-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 for instance with vm_state building and task_state spawning. [ 1153.399308] env[69927]: DEBUG nova.compute.manager [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Received event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.399402] env[69927]: DEBUG nova.compute.manager [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing instance network info cache due to event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1153.399497] env[69927]: DEBUG oslo_concurrency.lockutils [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] Acquiring lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.594640] env[69927]: DEBUG nova.scheduler.client.report [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.693634] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Releasing lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.694029] env[69927]: DEBUG nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Instance network_info: |[{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1153.694382] env[69927]: DEBUG oslo_concurrency.lockutils [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] Acquired lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.694621] env[69927]: DEBUG nova.network.neutron [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 
req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing network info cache for port c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.695896] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:4d:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f85835c8-5d0c-4b2f-97c4-6c4006580f79', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1c89675-9c86-4cf9-9c34-fdea74b6cf04', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.704489] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Creating folder: Project (999896abcbbd4ceea4fc2d898e025bc3). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1153.708664] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80061fb4-dd2c-41c7-a154-5b16e88f5a93 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.725930] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Created folder: Project (999896abcbbd4ceea4fc2d898e025bc3) in parent group-v811283. [ 1153.726253] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Creating folder: Instances. Parent ref: group-v811564. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1153.726601] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25f40837-9291-4477-b23d-096b6bedf0a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.739862] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Created folder: Instances in parent group-v811564. [ 1153.740145] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
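The "Instance VIF info" entries above show the per-port dict the VMware driver builds for an NSX opaque network: network_name, mac_address, an OpaqueNetwork network_ref keyed by the logical-switch id, the Neutron iface_id, and the vmxnet3 vif_model. A hypothetical helper producing the same shape is sketched below; the field names are taken from the log, but the function itself is illustrative and not part of Nova.

```python
def build_vif_info(port_id, mac, logical_switch_id):
    # Shape matches the logged "Instance VIF info" entries for an
    # nsx.LogicalSwitch-backed port on br-int.
    return {
        "network_name": "br-int",
        "mac_address": mac,
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": logical_switch_id,
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port_id,
        "vif_model": "vmxnet3",
    }
```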
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.740352] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.740601] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95e3c9db-0752-43b8-93e0-94e4055a44c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.764140] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.764140] env[69927]: value = "task-4096650" [ 1153.764140] env[69927]: _type = "Task" [ 1153.764140] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.773337] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096650, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.858553] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528fb8a8-90c5-ab38-3aa4-b0e56e640309, 'name': SearchDatastore_Task, 'duration_secs': 0.010581} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.858891] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.859276] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1153.859602] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.859720] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.860158] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] 
Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1153.860464] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c5f6eb1-524e-4bb8-af3c-f0214c160c57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.870614] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1153.870614] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1153.871390] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12f5a7f8-6733-47c4-b1e1-319a8a3a3f24 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.877444] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1153.877444] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526b919e-1474-55ec-ee84-ca7871aad534" [ 1153.877444] env[69927]: _type = "Task" [ 1153.877444] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.890579] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526b919e-1474-55ec-ee84-ca7871aad534, 'name': SearchDatastore_Task, 'duration_secs': 0.010161} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.891312] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21b45974-1776-4f62-9dd6-c13188eaf1dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.896651] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.896893] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.897107] env[69927]: DEBUG nova.network.neutron [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1153.897327] env[69927]: DEBUG nova.objects.instance [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'info_cache' on Instance uuid 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.901336] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1153.901336] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525282f8-1e80-b2e9-a007-31e6fd338158" [ 1153.901336] env[69927]: _type = "Task" [ 1153.901336] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.910221] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525282f8-1e80-b2e9-a007-31e6fd338158, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.920457] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.920719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.920941] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.921144] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.921401] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.923453] env[69927]: INFO nova.compute.manager [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Terminating instance [ 1153.955100] env[69927]: DEBUG nova.network.neutron [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updated VIF entry in instance network info cache for port c1c89675-9c86-4cf9-9c34-fdea74b6cf04. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1153.955919] env[69927]: DEBUG nova.network.neutron [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.103250] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.103250] env[69927]: DEBUG nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1154.106338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.142s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.107781] env[69927]: INFO nova.compute.claims [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.205458] env[69927]: DEBUG nova.compute.manager [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received event network-changed-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1154.205666] env[69927]: DEBUG nova.compute.manager [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Refreshing instance network info cache due to event network-changed-eb2105ba-0276-4bc6-a2af-933090d4cdcd. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1154.205883] env[69927]: DEBUG oslo_concurrency.lockutils [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] Acquiring lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.206046] env[69927]: DEBUG oslo_concurrency.lockutils [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] Acquired lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.206211] env[69927]: DEBUG nova.network.neutron [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Refreshing network info cache for port eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1154.214682] env[69927]: INFO nova.compute.manager [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Rescuing [ 1154.214954] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.215101] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.215270] env[69927]: DEBUG nova.network.neutron [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1154.276590] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096650, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.403124] env[69927]: DEBUG nova.objects.base [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Object Instance<4b7934f8-2c97-480b-8af7-f09f6819e2b6> lazy-loaded attributes: flavor,info_cache {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1154.415186] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525282f8-1e80-b2e9-a007-31e6fd338158, 'name': SearchDatastore_Task, 'duration_secs': 0.010068} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.416018] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.416307] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1154.416635] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fad3ef6-3921-4e4d-b207-543f86ce0a4f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.425472] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1154.425472] env[69927]: value = "task-4096651" [ 1154.425472] env[69927]: _type = "Task" [ 1154.425472] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.429537] env[69927]: DEBUG nova.compute.manager [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1154.429734] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1154.430582] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c61075-0764-4543-9c5e-116ff19856e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.439465] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096651, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.441298] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1154.441553] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd877681-d84b-4ca5-b365-a3ebc058cdb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.458324] env[69927]: DEBUG oslo_concurrency.lockutils [req-43746ddd-d46b-4b83-b2db-dfb740f0d127 req-9d389726-e153-40cd-b633-292942cf29be service nova] Releasing lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.522590] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1154.522814] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1154.523014] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleting the datastore file [datastore2] 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1154.523331] 
env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-224794d6-540b-4d07-8f06-a1047be4b8ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.530327] env[69927]: DEBUG oslo_vmware.api [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1154.530327] env[69927]: value = "task-4096653" [ 1154.530327] env[69927]: _type = "Task" [ 1154.530327] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.538827] env[69927]: DEBUG oslo_vmware.api [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.613161] env[69927]: DEBUG nova.compute.utils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1154.617031] env[69927]: DEBUG nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1154.617176] env[69927]: DEBUG nova.network.neutron [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1154.665637] env[69927]: DEBUG nova.policy [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1154.776822] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096650, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.942479] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096651, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.014989] env[69927]: DEBUG nova.network.neutron [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updated VIF entry in instance network info cache for port eb2105ba-0276-4bc6-a2af-933090d4cdcd. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1155.015562] env[69927]: DEBUG nova.network.neutron [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapeb2105ba-02", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.046548] env[69927]: DEBUG oslo_vmware.api [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162989} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.046548] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1155.046548] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1155.046548] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1155.046810] env[69927]: INFO nova.compute.manager [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1155.046849] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1155.047523] env[69927]: DEBUG nova.compute.manager [-] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1155.047577] env[69927]: DEBUG nova.network.neutron [-] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1155.118070] env[69927]: DEBUG nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1155.199118] env[69927]: DEBUG nova.network.neutron [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Updating instance_info_cache with network_info: [{"id": "30a26167-3dd4-4729-be64-03c251eaaa48", "address": "fa:16:3e:0e:18:44", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a26167-3d", "ovs_interfaceid": "30a26167-3dd4-4729-be64-03c251eaaa48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.281024] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096650, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.283236] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.337695] env[69927]: DEBUG nova.network.neutron [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Successfully created port: b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1155.402703] env[69927]: DEBUG nova.network.neutron [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [{"id": "af2d617c-7a43-466f-b19d-3cce0c52c836", "address": "fa:16:3e:ea:1d:36", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2d617c-7a", "ovs_interfaceid": "af2d617c-7a43-466f-b19d-3cce0c52c836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.444023] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742307} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.444023] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1155.444023] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1155.444023] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9b6729b-b573-48ef-9c84-29fe4094873d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.457626] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1155.457626] env[69927]: value = "task-4096654" [ 1155.457626] env[69927]: _type = "Task" [ 1155.457626] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.470559] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096654, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.487275] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b48615-7989-476e-b6e4-c5e93634816e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.498186] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9490b603-bf6f-4075-95a2-107697bd0a29 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.531008] env[69927]: DEBUG oslo_concurrency.lockutils [req-32584e52-3443-4aad-a4ea-76dcc0560dca req-883af30f-03e2-4b5f-ab08-e918bf027c26 service nova] Releasing lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.532086] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e867527-2130-40ae-a226-3323f3ec57e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.540461] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56809e30-6a96-46a2-90c6-da73f926d769 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.556637] env[69927]: DEBUG nova.compute.provider_tree [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.666511] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.666998] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.667347] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.667651] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 
tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.667938] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.670627] env[69927]: INFO nova.compute.manager [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Terminating instance [ 1155.701892] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.778767] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096650, 'name': CreateVM_Task, 'duration_secs': 1.551141} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.778927] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1155.779690] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.779859] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.780197] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1155.780469] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a59129c-70d6-47c9-8b66-dc2c87540a78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.787208] 
env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1155.787208] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ea17c6-a628-8afe-96ea-edefb2a3f2c7" [ 1155.787208] env[69927]: _type = "Task" [ 1155.787208] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.796367] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ea17c6-a628-8afe-96ea-edefb2a3f2c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.905028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-4b7934f8-2c97-480b-8af7-f09f6819e2b6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.965178] env[69927]: DEBUG nova.network.neutron [-] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.966444] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078227} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.967147] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1155.967651] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13d7029-f172-4b3c-8f88-056ef93b26dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.993506] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.994233] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9af838e9-93e8-49e8-b01f-4bdba558b2b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.015296] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1156.015296] env[69927]: value = "task-4096655" [ 1156.015296] env[69927]: _type = "Task" [ 1156.015296] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.024025] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096655, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.060017] env[69927]: DEBUG nova.scheduler.client.report [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.138989] env[69927]: DEBUG nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1156.162952] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1156.163295] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1156.163544] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1156.163775] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1156.163958] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1156.164159] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1156.164405] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1156.164596] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1156.164801] env[69927]: DEBUG 
nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1156.165040] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1156.165222] env[69927]: DEBUG nova.virt.hardware [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1156.166161] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f5cf7a-1d54-44de-8f66-7594c296d109 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.174340] env[69927]: DEBUG nova.compute.manager [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.174547] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.175389] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37444655-1f85-4feb-a9bc-5cea9d849c78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.178961] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de58122-d7d8-498a-8aa8-87320df1c44d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.189465] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1156.198143] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b9414bf-c057-48bf-a6ec-494a7059370a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.204787] env[69927]: DEBUG oslo_vmware.api [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1156.204787] env[69927]: value = "task-4096656" [ 
1156.204787] env[69927]: _type = "Task" [ 1156.204787] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.214369] env[69927]: DEBUG oslo_vmware.api [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.241661] env[69927]: DEBUG nova.compute.manager [req-0f53733e-8cbc-40b7-937b-cbc9f1200717 req-cc374a1b-eda3-4ae7-93ae-5e9cee913c6e service nova] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Received event network-vif-deleted-4729724d-c9d9-4722-987c-93aac6459382 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1156.297336] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ea17c6-a628-8afe-96ea-edefb2a3f2c7, 'name': SearchDatastore_Task, 'duration_secs': 0.01432} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.297774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.298056] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1156.298331] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.298494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.298680] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1156.298949] 
env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a55c3b0-ac60-4fda-b38d-e5b3d136be8f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.309092] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1156.309313] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1156.310113] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-753396e9-93a4-47b1-9d1c-680ab0d8fc5d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.316164] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1156.316164] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52784458-90c1-2f0c-69ee-1b53bf7ae4d0" [ 1156.316164] env[69927]: _type = "Task" [ 1156.316164] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.325151] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52784458-90c1-2f0c-69ee-1b53bf7ae4d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.468491] env[69927]: INFO nova.compute.manager [-] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Took 1.42 seconds to deallocate network for instance. [ 1156.533011] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096655, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.565280] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.566545] env[69927]: DEBUG nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1156.569793] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.156s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.571623] env[69927]: INFO nova.compute.claims [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1156.716801] env[69927]: DEBUG oslo_vmware.api [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096656, 'name': PowerOffVM_Task, 'duration_secs': 0.357066} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.718599] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.718599] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.718599] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-665abf1b-5ed9-4945-8a9d-5da96be8e2c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.815661] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.815907] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.816109] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Deleting the datastore file [datastore1] 406828cc-c6aa-4686-827d-c7c8e28ffb8e {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.817785] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcdef833-0d97-4942-b625-f6240d74cd59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.820850] env[69927]: DEBUG nova.compute.manager [req-0144a9c1-170b-4789-a966-49db28f472bd req-1595ff0d-a132-4c25-a0e4-aa1f2cb29ea0 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-vif-plugged-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1156.821072] env[69927]: DEBUG oslo_concurrency.lockutils [req-0144a9c1-170b-4789-a966-49db28f472bd req-1595ff0d-a132-4c25-a0e4-aa1f2cb29ea0 service nova] Acquiring lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.821286] env[69927]: DEBUG oslo_concurrency.lockutils [req-0144a9c1-170b-4789-a966-49db28f472bd req-1595ff0d-a132-4c25-a0e4-aa1f2cb29ea0 service nova] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.821454] env[69927]: DEBUG oslo_concurrency.lockutils [req-0144a9c1-170b-4789-a966-49db28f472bd req-1595ff0d-a132-4c25-a0e4-aa1f2cb29ea0 service nova] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.821617] env[69927]: DEBUG nova.compute.manager [req-0144a9c1-170b-4789-a966-49db28f472bd req-1595ff0d-a132-4c25-a0e4-aa1f2cb29ea0 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] No waiting events found dispatching network-vif-plugged-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1156.821783] env[69927]: WARNING nova.compute.manager [req-0144a9c1-170b-4789-a966-49db28f472bd req-1595ff0d-a132-4c25-a0e4-aa1f2cb29ea0 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received unexpected event network-vif-plugged-b92f830b-5eef-4260-a56b-94af4a4ec679 for instance with vm_state building and task_state spawning. [ 1156.833362] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52784458-90c1-2f0c-69ee-1b53bf7ae4d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010431} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.835319] env[69927]: DEBUG oslo_vmware.api [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for the task: (returnval){ [ 1156.835319] env[69927]: value = "task-4096658" [ 1156.835319] env[69927]: _type = "Task" [ 1156.835319] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.835528] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a5063e4-d940-42b5-9040-a72b2761b2b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.846860] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1156.846860] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528353f6-b6b4-6dec-de3c-8b9a363308d1" [ 1156.846860] env[69927]: _type = "Task" [ 1156.846860] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.850428] env[69927]: DEBUG oslo_vmware.api [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.860763] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528353f6-b6b4-6dec-de3c-8b9a363308d1, 'name': SearchDatastore_Task, 'duration_secs': 0.010603} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.861057] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.861337] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7ff17f1d-31fd-440b-906c-2719770a9151/7ff17f1d-31fd-440b-906c-2719770a9151.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1156.861613] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55ca1094-0364-42f4-a8e9-b0fee9b2f9bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.868832] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1156.868832] env[69927]: value = "task-4096659" [ 1156.868832] env[69927]: _type = "Task" [ 1156.868832] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.877769] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096659, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.910809] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1156.911153] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b23ffe37-4c6c-415c-b2c1-135d7daf3c8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.916650] env[69927]: DEBUG nova.network.neutron [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Successfully updated port: b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1156.919589] env[69927]: DEBUG oslo_vmware.api [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1156.919589] env[69927]: value = "task-4096660" [ 1156.919589] env[69927]: _type = "Task" [ 1156.919589] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.935357] env[69927]: DEBUG oslo_vmware.api [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096660, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.974868] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.026595] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096655, 'name': ReconfigVM_Task, 'duration_secs': 0.641762} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.026880] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.027599] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f43f1ba-16d2-42b3-a510-80d9792f7312 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.035719] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1157.035719] env[69927]: value = "task-4096661" [ 1157.035719] env[69927]: _type = "Task" [ 1157.035719] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.049713] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096661, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.079772] env[69927]: DEBUG nova.compute.utils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1157.081307] env[69927]: DEBUG nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1157.081463] env[69927]: DEBUG nova.network.neutron [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1157.119085] env[69927]: DEBUG nova.policy [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76414b2ae1aa4ab582c2b59fd4218005', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544f2a021144492ba1aea46ce6075e53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1157.266852] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.267263] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8d9cd8c-6048-4a0c-a25c-13fcb5c4eb67 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.276406] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1157.276406] env[69927]: value = "task-4096662" [ 1157.276406] env[69927]: _type = "Task" [ 1157.276406] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.289505] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096662, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.351111] env[69927]: DEBUG oslo_vmware.api [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Task: {'id': task-4096658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252551} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.351611] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.351915] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.352258] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.352549] env[69927]: INFO nova.compute.manager [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1157.352916] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.353672] env[69927]: DEBUG nova.compute.manager [-] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.353672] env[69927]: DEBUG nova.network.neutron [-] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1157.386827] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096659, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.421485] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.421694] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.421958] env[69927]: DEBUG nova.network.neutron [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1157.437518] env[69927]: DEBUG oslo_vmware.api [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096660, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.465451] env[69927]: DEBUG nova.network.neutron [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Successfully created port: 6252a6ad-27a0-47fd-8f00-60c71ed01985 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1157.549698] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096661, 'name': Rename_Task, 'duration_secs': 0.396664} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.550057] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.550327] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca631821-67b2-4422-a9be-d40bbe6adb13 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.561635] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1157.561635] env[69927]: value = "task-4096663" [ 1157.561635] env[69927]: _type = "Task" [ 1157.561635] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.574733] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096663, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.589939] env[69927]: DEBUG nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1157.790735] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096662, 'name': PowerOffVM_Task, 'duration_secs': 0.229826} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.793759] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1157.794783] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdd0048-5cdc-493f-be55-e02960bcb1f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.817299] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811ece6c-aa7a-4142-a2dc-1fbbfdab3ce5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.853389] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.853690] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf93cc7e-de2d-43f3-be54-77f1f426890d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.863754] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1157.863754] env[69927]: value = "task-4096664" [ 1157.863754] env[69927]: _type = "Task" [ 1157.863754] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.876515] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.882180] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096659, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73723} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.882437] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7ff17f1d-31fd-440b-906c-2719770a9151/7ff17f1d-31fd-440b-906c-2719770a9151.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1157.882650] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1157.882893] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce4392af-ecf8-41f4-b8a6-7e4244c1aca5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.890013] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1157.890013] env[69927]: value = "task-4096665" [ 1157.890013] env[69927]: _type = "Task" [ 1157.890013] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.901165] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096665, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.934583] env[69927]: DEBUG oslo_vmware.api [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096660, 'name': PowerOnVM_Task, 'duration_secs': 0.869876} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.937658] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.937868] env[69927]: DEBUG nova.compute.manager [None req-8a346191-09c6-4eb2-bfe0-be075f801481 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1157.938907] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77f6f42-b2f8-485e-8291-1637447dc704 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.970587] env[69927]: DEBUG nova.network.neutron [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1157.983628] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b389c033-d021-4ae5-a72e-9c6f97d53fc4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.995576] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb136d80-fe8c-4808-978f-46f8ada7c0c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.034392] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24fa050-54ae-4ef9-aa4d-555d575e8453 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.042869] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ec7e6d-8fa7-4ab0-9487-fbb5774829be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.061574] env[69927]: DEBUG nova.compute.provider_tree [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.076410] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096663, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.132178] env[69927]: DEBUG nova.network.neutron [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.328379] env[69927]: DEBUG nova.compute.manager [req-526182a8-015f-4631-8863-0357431a0313 req-6eea134a-c0c5-468f-91c2-84ae2b745bf5 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Received event network-vif-deleted-7e7e497c-f90e-44b0-a0dc-e400b4b57c0c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1158.328379] env[69927]: INFO nova.compute.manager [req-526182a8-015f-4631-8863-0357431a0313 req-6eea134a-c0c5-468f-91c2-84ae2b745bf5 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Neutron deleted interface 7e7e497c-f90e-44b0-a0dc-e400b4b57c0c; detaching it from the instance and deleting it from the info cache [ 1158.328379] env[69927]: DEBUG nova.network.neutron [req-526182a8-015f-4631-8863-0357431a0313 req-6eea134a-c0c5-468f-91c2-84ae2b745bf5 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.377127] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1158.377127] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1158.377127] env[69927]: DEBUG 
oslo_concurrency.lockutils [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.377127] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.377127] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1158.377466] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdd8c0a5-dcf6-422d-aa23-e750487fafe1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.388828] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1158.389204] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1158.389957] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e6304a7-e995-458f-9061-dfc5685d4e56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.400936] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1158.400936] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5203aa72-95c4-61a4-dca1-d54f07f9e38c" [ 1158.400936] env[69927]: _type = "Task" [ 1158.400936] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.405886] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073377} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.411124] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1158.412495] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b46d9f-57b3-4e8e-a583-47817110e499 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.422433] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5203aa72-95c4-61a4-dca1-d54f07f9e38c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.440981] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 7ff17f1d-31fd-440b-906c-2719770a9151/7ff17f1d-31fd-440b-906c-2719770a9151.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1158.441417] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bc74df7-7add-4820-8d86-1676a59a8a74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.465319] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1158.465319] env[69927]: value = "task-4096666" [ 1158.465319] env[69927]: _type = "Task" [ 1158.465319] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.477801] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096666, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.568525] env[69927]: DEBUG nova.scheduler.client.report [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1158.579766] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096663, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.600931] env[69927]: DEBUG nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1158.633176] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1158.633534] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1158.633715] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1158.633899] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1158.634057] env[69927]: 
DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1158.634228] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1158.634483] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1158.634681] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1158.634910] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1158.635110] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1158.635308] env[69927]: DEBUG nova.virt.hardware [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1158.635839] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.636141] env[69927]: DEBUG nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Instance network_info: |[{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1158.637036] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c5163b-e3d1-41f7-8b78-4d2facddb82a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.640147] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:e7:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b92f830b-5eef-4260-a56b-94af4a4ec679', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1158.649101] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1158.649454] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1158.650315] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d770c74-4db0-4264-8c1b-166e2fad6239 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.669649] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304fb83e-a8d7-4d1d-951c-3f9fff8c655d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.675542] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1158.675542] env[69927]: value = "task-4096667" [ 1158.675542] env[69927]: _type = "Task" [ 1158.675542] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.693611] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096667, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.777582] env[69927]: DEBUG nova.network.neutron [-] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.835552] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f583888d-52d9-4f45-825b-44b747ed3f51 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.847057] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26445a25-f43b-4b98-bdf9-377d53c53686 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.860863] env[69927]: DEBUG nova.compute.manager [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1158.861127] env[69927]: DEBUG nova.compute.manager [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing instance network info cache due to event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1158.861288] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.861470] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.861639] env[69927]: DEBUG nova.network.neutron [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1158.892333] env[69927]: DEBUG nova.compute.manager [req-526182a8-015f-4631-8863-0357431a0313 req-6eea134a-c0c5-468f-91c2-84ae2b745bf5 service nova] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Detach interface failed, port_id=7e7e497c-f90e-44b0-a0dc-e400b4b57c0c, reason: Instance 406828cc-c6aa-4686-827d-c7c8e28ffb8e could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1158.917647] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5203aa72-95c4-61a4-dca1-d54f07f9e38c, 'name': SearchDatastore_Task, 'duration_secs': 0.022392} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.918974] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ff2d91e-5e7c-4526-842b-a6e621b20a25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.928735] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1158.928735] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523fbe74-038d-b663-8d71-ca009c5e12b6" [ 1158.928735] env[69927]: _type = "Task" [ 1158.928735] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.941773] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523fbe74-038d-b663-8d71-ca009c5e12b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.975912] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096666, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.076779] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.077168] env[69927]: DEBUG nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1159.080981] env[69927]: DEBUG oslo_vmware.api [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096663, 'name': PowerOnVM_Task, 'duration_secs': 1.21377} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.081795] env[69927]: DEBUG nova.network.neutron [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Successfully updated port: 6252a6ad-27a0-47fd-8f00-60c71ed01985 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1159.082971] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.800s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.085410] env[69927]: INFO nova.compute.claims [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1159.088872] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1159.089264] env[69927]: INFO nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Took 10.07 seconds to spawn the instance on the hypervisor. [ 1159.089853] env[69927]: DEBUG nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1159.090907] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d7e0ba-5720-40e9-afc1-eb26d60197ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.185850] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096667, 'name': CreateVM_Task, 'duration_secs': 0.492619} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.186055] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1159.186791] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.186961] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.187311] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1159.187604] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3bfd35d-c205-4e70-b3e7-04fe9d05c15c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.193807] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1159.193807] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c87dfd-7f1d-730c-4916-a7a4a7b0d235" [ 1159.193807] env[69927]: _type = "Task" [ 1159.193807] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.203276] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c87dfd-7f1d-730c-4916-a7a4a7b0d235, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.280619] env[69927]: INFO nova.compute.manager [-] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Took 1.93 seconds to deallocate network for instance. 
[ 1159.369817] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.370149] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.370389] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.370596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.370787] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.373813] env[69927]: INFO nova.compute.manager [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Terminating instance [ 1159.440493] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]523fbe74-038d-b663-8d71-ca009c5e12b6, 'name': SearchDatastore_Task, 'duration_secs': 0.017674} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.440694] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.440955] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. {{(pid=69927) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1159.441317] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-200c6119-9068-4be8-bad6-1d0a4af4d342 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.452133] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1159.452133] env[69927]: value = "task-4096668" [ 1159.452133] env[69927]: _type = "Task" [ 1159.452133] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.462247] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096668, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.475482] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096666, 'name': ReconfigVM_Task, 'duration_secs': 0.685144} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.475799] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 7ff17f1d-31fd-440b-906c-2719770a9151/7ff17f1d-31fd-440b-906c-2719770a9151.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.476510] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-682a3d0e-2d4d-464c-b5a2-7bf91c2d343f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.483128] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1159.483128] env[69927]: value = "task-4096669" [ 1159.483128] env[69927]: _type = "Task" [ 1159.483128] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.499237] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096669, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.582402] env[69927]: DEBUG nova.compute.utils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1159.583864] env[69927]: DEBUG nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1159.584061] env[69927]: DEBUG nova.network.neutron [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1159.591780] env[69927]: DEBUG nova.network.neutron [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updated VIF entry in instance network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1159.592189] env[69927]: DEBUG nova.network.neutron [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.593507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "refresh_cache-ba7989a1-a644-4eb7-bf65-20ca1810dd62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.593654] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "refresh_cache-ba7989a1-a644-4eb7-bf65-20ca1810dd62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.593797] env[69927]: DEBUG nova.network.neutron [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1159.611351] env[69927]: INFO nova.compute.manager [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Took 21.06 seconds to build instance. 
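The instance_info_cache payload logged above is a JSON-style list of VIF dictionaries (port id, MAC, subnets, fixed IPs, OVS details). The sketch below shows how such a payload can be inspected; the literal is a trimmed-down stand-in with the same shape as the entry above, kept only so the snippet runs on its own.

```python
import json

# Hedged sketch: parse a network_info payload shaped like the cache entry
# logged above (values abbreviated to the fields used here).
network_info_json = '''[{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679",
  "address": "fa:16:3e:a4:e7:1b", "devname": "tapb92f830b-5e",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
    "ips": [{"address": "192.168.128.4", "type": "fixed"}]}]}}]'''

for vif in json.loads(network_info_json):
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    # e.g. b92f830b-... fa:16:3e:a4:e7:1b ['192.168.128.4'] tapb92f830b-5e
    print(vif["id"], vif["address"], fixed_ips, vif.get("devname"))
```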
[ 1159.655488] env[69927]: DEBUG nova.policy [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d854e5435164764b5b69b9c7262398f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dee421c661394f3fbf8d69a575f095a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1159.710023] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c87dfd-7f1d-730c-4916-a7a4a7b0d235, 'name': SearchDatastore_Task, 'duration_secs': 0.039609} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.710023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.710023] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1159.710023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.710023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.710023] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1159.710023] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6405bbd-9514-4d5c-ad12-56ad56056fa0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.722082] 
env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1159.722297] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1159.723076] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d4236d7-89ef-4bb1-9570-dfd8d906c575 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.729558] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1159.729558] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e0ab19-2e86-f02b-b413-ad53492ad8a2" [ 1159.729558] env[69927]: _type = "Task" [ 1159.729558] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.738424] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e0ab19-2e86-f02b-b413-ad53492ad8a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.788995] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.882334] env[69927]: DEBUG nova.compute.manager [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1159.882643] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1159.883704] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db42849b-46f7-4345-86d9-f5e1a022062f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.894380] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1159.894791] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbcdbba3-ce01-45ac-a26e-56f25ef95dfb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.903683] env[69927]: DEBUG oslo_vmware.api [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1159.903683] env[69927]: value = "task-4096670" [ 1159.903683] env[69927]: _type = "Task" [ 1159.903683] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.915796] env[69927]: DEBUG oslo_vmware.api [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096670, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.963446] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096668, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.995024] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096669, 'name': Rename_Task, 'duration_secs': 0.222443} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.995024] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1159.995224] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d57a1777-40e1-4db3-b886-681fd09d0581 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.005212] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1160.005212] env[69927]: value = "task-4096671" [ 1160.005212] env[69927]: _type = "Task" [ 1160.005212] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.016220] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.087273] env[69927]: DEBUG nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1160.095711] env[69927]: DEBUG oslo_concurrency.lockutils [req-8ea9bcd2-9edf-4d65-8338-a2fc842496ec req-72e500e2-10c2-4d1f-83a6-348ea3e16b1d service nova] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.113966] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8215163e-f30c-4c36-9d64-cc148431c74d tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.578s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.134107] env[69927]: DEBUG nova.network.neutron [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1160.246128] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e0ab19-2e86-f02b-b413-ad53492ad8a2, 'name': SearchDatastore_Task, 'duration_secs': 0.024312} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.251685] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee622597-7ef7-4591-96d8-f97025aa2bff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.260242] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1160.260242] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e8753-d20d-4ccf-8ccc-ed1686ed38d8" [ 1160.260242] env[69927]: _type = "Task" [ 1160.260242] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.272767] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e8753-d20d-4ccf-8ccc-ed1686ed38d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.329300] env[69927]: DEBUG nova.network.neutron [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Updating instance_info_cache with network_info: [{"id": "6252a6ad-27a0-47fd-8f00-60c71ed01985", "address": "fa:16:3e:20:fe:e5", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6252a6ad-27", "ovs_interfaceid": "6252a6ad-27a0-47fd-8f00-60c71ed01985", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.402437] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35cdd52-c6eb-40d1-9709-04c9bdd3008f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.418573] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0583e5-725c-4d14-9655-c714180de612 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.426022] env[69927]: DEBUG oslo_vmware.api [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 
tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096670, 'name': PowerOffVM_Task, 'duration_secs': 0.395346} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.426022] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1160.426022] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1160.426022] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00a373fb-cadd-4b1b-bd8b-a99cc120557b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.469788] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bbe11c-77b3-4e37-9ce1-4c025ded57e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.477801] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096668, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690115} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.479985] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. 
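The CopyVirtualDisk_Task, PowerOffVM_Task and UnregisterVM sequences above all follow the same oslo.vmware pattern: invoke a vSphere *_Task method through the shared API session, then poll the returned task until it completes (the "Waiting for the task" and "progress is N%" lines). A hedged sketch of that pattern follows; the vCenter host, credentials and datastore paths are placeholders, and a real disk copy may also need datacenter references as Nova's ds_util passes them.

```python
from oslo_vmware import api as vmware_api

# Illustrative sketch of the invoke-then-poll pattern; all connection details
# and paths below are placeholders, not values from this deployment.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Start an asynchronous vSphere task (here a virtual-disk copy, analogous to
# the CopyVirtualDisk_Task calls above) ...
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore1] cache/base.vmdk',
    destName='[datastore1] instance/instance.vmdk')

# ... then block until it finishes; wait_for_task() is what drives the
# "progress is N%" polling lines and raises if the task errors out.
session.wait_for_task(task)
```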
[ 1160.480885] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff0cae1-811c-4a5d-af11-699396487c42 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.484269] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93881a6-64f4-4fbc-bb70-69162f60a990 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.513833] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1160.527692] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e98a2f42-e91d-4648-8a7b-dff2f1caa210 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.541092] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1160.541445] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1160.541545] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleting the datastore file [datastore2] 4b7934f8-2c97-480b-8af7-f09f6819e2b6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1160.542132] env[69927]: DEBUG nova.compute.provider_tree [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.543562] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-883143ad-2168-4e24-9b9b-b620d981430b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.555138] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096671, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.555138] env[69927]: DEBUG oslo_vmware.api [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1160.555138] env[69927]: value = "task-4096674" [ 1160.555138] env[69927]: _type = "Task" [ 1160.555138] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.555138] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1160.555138] env[69927]: value = "task-4096673" [ 1160.555138] env[69927]: _type = "Task" [ 1160.555138] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.571280] env[69927]: DEBUG oslo_vmware.api [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.571546] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096673, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.690525] env[69927]: DEBUG nova.network.neutron [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Successfully created port: 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1160.772510] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526e8753-d20d-4ccf-8ccc-ed1686ed38d8, 'name': SearchDatastore_Task, 'duration_secs': 0.01508} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.772873] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.773238] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] d548ea75-9c1f-4884-b338-194f1b5d62ef/d548ea75-9c1f-4884-b338-194f1b5d62ef.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1160.773583] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e99a08b-dc40-466c-b606-2ef7ae9fcdc1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.784195] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1160.784195] env[69927]: value = "task-4096675" [ 1160.784195] env[69927]: _type = "Task" [ 1160.784195] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.793826] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096675, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.837607] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "refresh_cache-ba7989a1-a644-4eb7-bf65-20ca1810dd62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.837607] env[69927]: DEBUG nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Instance network_info: |[{"id": "6252a6ad-27a0-47fd-8f00-60c71ed01985", "address": "fa:16:3e:20:fe:e5", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6252a6ad-27", "ovs_interfaceid": "6252a6ad-27a0-47fd-8f00-60c71ed01985", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1160.838023] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:fe:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6252a6ad-27a0-47fd-8f00-60c71ed01985', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1160.845895] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.846122] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1160.846303] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15fa6cd3-c9ab-47ba-bb54-09d79734100f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.869109] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1160.869109] env[69927]: value = "task-4096676" [ 1160.869109] env[69927]: _type = "Task" [ 1160.869109] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.879300] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096676, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.999810] env[69927]: DEBUG nova.compute.manager [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.000103] env[69927]: DEBUG nova.compute.manager [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing instance network info cache due to event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1161.000356] env[69927]: DEBUG oslo_concurrency.lockutils [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] Acquiring lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.000615] env[69927]: DEBUG oslo_concurrency.lockutils [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] Acquired lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.001295] env[69927]: DEBUG nova.network.neutron [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.023238] env[69927]: DEBUG oslo_vmware.api [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096671, 'name': PowerOnVM_Task, 'duration_secs': 0.821765} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.023501] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1161.023723] env[69927]: INFO nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Took 9.60 seconds to spawn the instance on the hypervisor. [ 1161.024589] env[69927]: DEBUG nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1161.026107] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6ac922-ea58-44bd-9046-349b40c7b870 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.031188] env[69927]: DEBUG nova.compute.manager [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Received event network-vif-plugged-6252a6ad-27a0-47fd-8f00-60c71ed01985 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.034021] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] Acquiring lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.034021] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.034021] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.034021] env[69927]: DEBUG nova.compute.manager [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] No waiting events found dispatching network-vif-plugged-6252a6ad-27a0-47fd-8f00-60c71ed01985 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1161.034021] env[69927]: WARNING nova.compute.manager [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: 
ba7989a1-a644-4eb7-bf65-20ca1810dd62] Received unexpected event network-vif-plugged-6252a6ad-27a0-47fd-8f00-60c71ed01985 for instance with vm_state building and task_state spawning. [ 1161.034021] env[69927]: DEBUG nova.compute.manager [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Received event network-changed-6252a6ad-27a0-47fd-8f00-60c71ed01985 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.034021] env[69927]: DEBUG nova.compute.manager [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Refreshing instance network info cache due to event network-changed-6252a6ad-27a0-47fd-8f00-60c71ed01985. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1161.034021] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] Acquiring lock "refresh_cache-ba7989a1-a644-4eb7-bf65-20ca1810dd62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.034021] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] Acquired lock "refresh_cache-ba7989a1-a644-4eb7-bf65-20ca1810dd62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.034021] env[69927]: DEBUG nova.network.neutron [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Refreshing network info cache for port 6252a6ad-27a0-47fd-8f00-60c71ed01985 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.048401] env[69927]: DEBUG nova.scheduler.client.report [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1161.069812] env[69927]: DEBUG oslo_vmware.api [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214715} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.073018] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1161.073259] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1161.073440] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1161.073631] env[69927]: INFO nova.compute.manager [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1161.073890] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1161.075261] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.075261] env[69927]: DEBUG nova.compute.manager [-] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1161.075261] env[69927]: DEBUG nova.network.neutron [-] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1161.100556] env[69927]: DEBUG nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1161.134527] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1161.134806] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1161.134967] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1161.135198] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1161.135322] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1161.135473] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1161.135707] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1161.136352] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1161.136352] env[69927]: DEBUG nova.virt.hardware [None 
req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1161.136352] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1161.136548] env[69927]: DEBUG nova.virt.hardware [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1161.137384] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46187986-7973-48d6-aa6e-860357cacf80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.148584] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec64838f-bf73-4239-83ca-e8c6a145ddd9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.302476] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096675, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.379462] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096676, 'name': CreateVM_Task, 'duration_secs': 0.403909} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.379651] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1161.380398] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.380564] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.381429] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1161.381429] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf0d8ea3-57d7-4b9b-8745-0a19ba14a939 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.386930] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1161.386930] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52629f1c-7848-d1ad-ce25-1b031e6aa915" [ 1161.386930] env[69927]: _type = "Task" [ 1161.386930] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.396662] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52629f1c-7848-d1ad-ce25-1b031e6aa915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.557309] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.557309] env[69927]: DEBUG nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1161.563588] env[69927]: INFO nova.compute.manager [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Took 20.90 seconds to build instance. [ 1161.568026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.318s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.568447] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.570834] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.334s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.571226] env[69927]: DEBUG nova.objects.instance [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lazy-loading 'resources' on Instance uuid cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.584517] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096673, 'name': ReconfigVM_Task, 'duration_secs': 0.780023} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.585293] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1161.589022] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aeae539-57dc-4a65-9769-179336d4b002 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.619375] env[69927]: INFO nova.scheduler.client.report [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted allocations for instance 9aa0a285-66e4-4792-bbe9-a62f76666ec6 [ 1161.625025] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51bbaea2-2d44-4501-8131-70d1c8d19695 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.649033] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1161.649033] env[69927]: value = "task-4096677" [ 1161.649033] env[69927]: _type = "Task" [ 1161.649033] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.658071] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096677, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.799221] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096675, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.767714} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.799679] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] d548ea75-9c1f-4884-b338-194f1b5d62ef/d548ea75-9c1f-4884-b338-194f1b5d62ef.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1161.800068] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1161.800533] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53ffe0c3-fbab-42f5-a1a8-749de18794b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.813024] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1161.813024] env[69927]: value = "task-4096678" [ 1161.813024] env[69927]: _type = "Task" [ 1161.813024] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.821590] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096678, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.899769] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52629f1c-7848-d1ad-ce25-1b031e6aa915, 'name': SearchDatastore_Task, 'duration_secs': 0.026944} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.900328] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.900740] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1161.901137] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.901484] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.902349] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1161.902774] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bde70c13-ec43-438b-8290-b8fbf5b87728 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.918154] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1161.918154] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1161.920282] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44af6f6e-fd0c-4543-84d6-5dc75a21661d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.927015] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1161.927015] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241b028-f6f0-c235-ce02-0006854f7bc6" [ 1161.927015] env[69927]: _type = "Task" [ 1161.927015] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.937501] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241b028-f6f0-c235-ce02-0006854f7bc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.062591] env[69927]: DEBUG nova.network.neutron [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updated VIF entry in instance network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.062994] env[69927]: DEBUG nova.network.neutron [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd201dadc-ab", "ovs_interfaceid": "d201dadc-ab89-4ede-8c29-41217e3af341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.075407] env[69927]: DEBUG nova.compute.utils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Using /dev/sd instead of None {{(pid=69927) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1162.076891] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fa9fc20-4534-4bd1-b786-05c2acb6f9db tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "7ff17f1d-31fd-440b-906c-2719770a9151" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.418s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.077464] env[69927]: DEBUG nova.objects.instance [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lazy-loading 'numa_topology' on Instance uuid cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.079333] env[69927]: DEBUG nova.network.neutron [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Updated VIF entry in instance network info cache for port 6252a6ad-27a0-47fd-8f00-60c71ed01985. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.079498] env[69927]: DEBUG nova.network.neutron [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Updating instance_info_cache with network_info: [{"id": "6252a6ad-27a0-47fd-8f00-60c71ed01985", "address": "fa:16:3e:20:fe:e5", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6252a6ad-27", "ovs_interfaceid": "6252a6ad-27a0-47fd-8f00-60c71ed01985", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.080768] env[69927]: DEBUG nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1162.080934] env[69927]: DEBUG nova.network.neutron [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1162.142981] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dce1e610-c31c-4304-8c43-051cce67357c tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "9aa0a285-66e4-4792-bbe9-a62f76666ec6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.447s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.146592] env[69927]: DEBUG nova.policy [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de8b1b11969a4feb818dc682d2fec552', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61b1aea0ccf049c8942ba32932412497', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1162.159351] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.324699] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096678, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.278155} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.325014] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1162.325866] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be9af52-412e-4226-a189-3ee7f3c88cb8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.356091] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] d548ea75-9c1f-4884-b338-194f1b5d62ef/d548ea75-9c1f-4884-b338-194f1b5d62ef.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1162.356500] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fea8f74-f511-4b21-8371-fdcb7926ad78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.377304] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1162.377304] env[69927]: value = "task-4096679" [ 1162.377304] env[69927]: _type = "Task" [ 1162.377304] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.386158] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096679, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.438794] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241b028-f6f0-c235-ce02-0006854f7bc6, 'name': SearchDatastore_Task, 'duration_secs': 0.036635} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.439768] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a4a31ec-540d-4cbb-86b4-9eb16a68b571 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.445738] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1162.445738] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bc34fe-c6bc-ba70-c97c-81e670ac2c48" [ 1162.445738] env[69927]: _type = "Task" [ 1162.445738] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.455127] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bc34fe-c6bc-ba70-c97c-81e670ac2c48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.481846] env[69927]: DEBUG nova.network.neutron [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Successfully created port: f232a8db-809f-43b2-ae66-27047a39c8bf {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1162.566228] env[69927]: DEBUG oslo_concurrency.lockutils [req-4cd828b5-c1e8-4a0b-9774-53413359bf1b req-082296e7-b8c0-42c5-b1b7-6f58615a2292 service nova] Releasing lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.582191] env[69927]: DEBUG nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1162.585051] env[69927]: DEBUG nova.objects.base [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1162.587772] env[69927]: DEBUG oslo_concurrency.lockutils [req-1d3c8c1f-5872-44b6-869b-83c9fc11dafb req-9abc54a3-1a15-4f8b-a7ae-2b824437b68b service nova] Releasing lock "refresh_cache-ba7989a1-a644-4eb7-bf65-20ca1810dd62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.661047] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096677, 'name': ReconfigVM_Task, 'duration_secs': 0.610541} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.661047] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1162.661147] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08c1c148-76f7-4b4c-9417-679fef47c0fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.668252] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1162.668252] env[69927]: value = "task-4096680" [ 1162.668252] env[69927]: _type = "Task" [ 1162.668252] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.678052] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096680, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.695805] env[69927]: DEBUG nova.network.neutron [-] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.774668] env[69927]: DEBUG nova.network.neutron [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Successfully updated port: 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1162.858919] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d55e73-47c1-46b5-9943-a1309e6c822e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.867089] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a21f65-7b3a-45ae-ad42-9bd06424bf9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.695028] env[69927]: INFO nova.compute.manager [-] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Took 2.62 seconds to deallocate network for instance. 
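The "Invoking <object>.<Something>_Task", "Waiting for the task ... to complete" and "progress is N%" entries above all come from oslo.vmware's task helpers: the caller issues the SOAP call through the API session and then blocks in wait_for_task() while a poll loop reports progress. A minimal sketch of that calling pattern (the names `session` and `vm_ref` are assumed placeholders, with `session` standing for an already-established oslo_vmware.api.VMwareAPISession; this is illustrative, not Nova's actual driver code):

    def power_on(session, vm_ref):
        # invoke_api() issues the SOAP call (logged above as
        # "Invoking VirtualMachine.PowerOnVM_Task") and returns a task reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task server-side (the "_poll_task ...
        # progress is N%" lines) and returns its TaskInfo once it reaches the
        # "success" state, raising an oslo.vmware exception on error or cancel.
        return session.wait_for_task(task)
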
[ 1163.695028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.695028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.695028] env[69927]: DEBUG nova.network.neutron [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1163.704962] env[69927]: DEBUG nova.compute.manager [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Received event network-vif-plugged-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.707143] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.707143] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.707143] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.707143] env[69927]: DEBUG nova.compute.manager [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] No waiting events found dispatching network-vif-plugged-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1163.707143] env[69927]: WARNING nova.compute.manager [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Received unexpected event network-vif-plugged-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 for instance with vm_state building and task_state spawning. 
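The "Acquiring lock ...", "Lock ... acquired by ... :: waited N.NNNs" and "Lock ... "released" by ... :: held N.NNNs" entries are emitted by oslo.concurrency's lockutils wrappers around the named critical sections. A minimal sketch of the two usual forms visible in these entries (the lock names mirror the log; the function bodies are placeholders, not Nova code):

    from oslo_concurrency import lockutils

    # Decorator form: callers serialize on the named lock, and the wrapper
    # logs the acquired/released pairs with waited/held times, as above.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # placeholder body

    # Context-manager form, as used for per-instance locks such as
    # "refresh_cache-<instance uuid>" or the image-cache datastore paths.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder body
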
[ 1163.707143] env[69927]: DEBUG nova.compute.manager [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Received event network-changed-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.707143] env[69927]: DEBUG nova.compute.manager [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Refreshing instance network info cache due to event network-changed-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1163.707143] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Acquiring lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.707508] env[69927]: DEBUG nova.compute.manager [req-4045869f-ef8a-4623-862e-1609622c6fc0 req-8367b85e-e68c-48a1-816f-39908887a41d service nova] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Received event network-vif-deleted-af2d617c-7a43-466f-b19d-3cce0c52c836 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.716771] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025991c9-c6d2-4296-96f0-a984cce3495b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.731723] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bc34fe-c6bc-ba70-c97c-81e670ac2c48, 'name': SearchDatastore_Task, 'duration_secs': 0.012236} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.731999] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096679, 'name': ReconfigVM_Task, 'duration_secs': 0.314508} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.737073] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.737357] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ba7989a1-a644-4eb7-bf65-20ca1810dd62/ba7989a1-a644-4eb7-bf65-20ca1810dd62.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1163.737655] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Reconfigured VM instance instance-00000064 to attach disk [datastore2] d548ea75-9c1f-4884-b338-194f1b5d62ef/d548ea75-9c1f-4884-b338-194f1b5d62ef.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1163.738653] env[69927]: DEBUG oslo_vmware.api [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096680, 'name': PowerOnVM_Task, 'duration_secs': 1.033995} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.738868] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5708429a-04a0-4a22-b134-11f9a2fc61d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.740977] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b9fb47b-62a9-4b69-9522-3fe11285f331 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.743661] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efd1fde-cdde-49f9-8a9f-c3b7495884e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.747455] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1163.750739] env[69927]: DEBUG nova.compute.manager [None req-6c9e092f-bffa-40c1-9502-8ff8dd2f770a tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1163.752708] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8779365-e557-4b53-855d-d8b94a34f9c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.766683] env[69927]: DEBUG nova.compute.provider_tree [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.768076] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1163.768076] env[69927]: value = "task-4096682" [ 1163.768076] env[69927]: _type = "Task" [ 1163.768076] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.768364] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1163.768364] env[69927]: value = "task-4096681" [ 1163.768364] env[69927]: _type = "Task" [ 1163.768364] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.785898] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096682, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.789534] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096681, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.027081] env[69927]: DEBUG nova.network.neutron [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Successfully updated port: f232a8db-809f-43b2-ae66-27047a39c8bf {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1164.196141] env[69927]: DEBUG nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1164.221228] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.229134] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1164.229489] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1164.229774] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1164.230063] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] 
Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1164.230253] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1164.230410] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1164.230676] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1164.230835] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1164.231052] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1164.231236] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1164.231443] env[69927]: DEBUG nova.virt.hardware [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1164.232399] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086631be-930a-4487-ac15-c31ba159c570 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.241241] env[69927]: DEBUG nova.network.neutron [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1164.244267] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7e749c-3ab0-4900-8e16-dd9afeca76bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.272814] env[69927]: DEBUG nova.scheduler.client.report [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1164.291197] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096682, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514738} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.295195] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ba7989a1-a644-4eb7-bf65-20ca1810dd62/ba7989a1-a644-4eb7-bf65-20ca1810dd62.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1164.295464] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1164.295746] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096681, 'name': Rename_Task, 'duration_secs': 0.147893} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.295946] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c1d841c-4b09-477f-a602-ce0dfafa5db0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.298791] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1164.298998] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-463c0897-688d-4d58-a686-135019af973e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.306393] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1164.306393] env[69927]: value = "task-4096683" [ 1164.306393] env[69927]: _type = "Task" [ 1164.306393] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.313490] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1164.313490] env[69927]: value = "task-4096684" [ 1164.313490] env[69927]: _type = "Task" [ 1164.313490] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.321565] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096683, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.330548] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096684, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.431850] env[69927]: DEBUG nova.network.neutron [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.530790] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "refresh_cache-6828dc80-2e0e-4715-a620-42edbe5eec2f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.531096] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "refresh_cache-6828dc80-2e0e-4715-a620-42edbe5eec2f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.531264] env[69927]: DEBUG nova.network.neutron [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1164.783881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.213s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.787151] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 
7.813s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.787471] env[69927]: DEBUG nova.objects.instance [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'resources' on Instance uuid 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.823919] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096683, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092481} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.827678] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1164.828479] env[69927]: DEBUG oslo_vmware.api [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096684, 'name': PowerOnVM_Task, 'duration_secs': 0.511051} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.829804] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d616ef0-21fe-4bb7-ab5a-25b074eb7b7f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.832618] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1164.832877] env[69927]: INFO nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Took 8.69 seconds to spawn the instance on the hypervisor. 
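Completed tasks above carry their timing inline (e.g. 'name': PowerOnVM_Task, 'duration_secs': 0.511051), so per-task durations can be pulled straight out of a log like this one. A small, hypothetical stdlib helper for doing that (the log file name is a placeholder):

    import re
    from collections import defaultdict

    # Matches e.g. "'name': PowerOnVM_Task, 'duration_secs': 0.511051"
    TASK_RE = re.compile(r"'name': (\w+), 'duration_secs': ([\d.]+)")

    def task_durations(path='nova-compute.log'):
        """Return {task name: [durations in seconds]} for completed vCenter tasks."""
        durations = defaultdict(list)
        with open(path) as log:
            for line in log:
                for name, secs in TASK_RE.findall(line):
                    durations[name].append(float(secs))
        return durations
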
[ 1164.833099] env[69927]: DEBUG nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1164.834151] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bec0556-feb5-4a8f-900a-ef4f75d5d93a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.859022] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] ba7989a1-a644-4eb7-bf65-20ca1810dd62/ba7989a1-a644-4eb7-bf65-20ca1810dd62.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1164.861442] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03e30cb9-b2af-4dc8-b675-142351e67555 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.885684] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1164.885684] env[69927]: value = "task-4096685" [ 1164.885684] env[69927]: _type = "Task" [ 1164.885684] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.894654] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096685, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.935185] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.935554] env[69927]: DEBUG nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Instance network_info: |[{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1164.935859] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.936059] env[69927]: DEBUG nova.network.neutron [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Refreshing network info cache for port 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1164.937707] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:aa:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57691231-2b8d-4d71-8f79-d4a6a1d95ec8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1164.946829] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 
tempest-ServerActionsTestJSON-2022006223-project-member] Creating folder: Project (dee421c661394f3fbf8d69a575f095a9). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1164.949078] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e967901-0430-4e89-a0b3-5c9a6625cf33 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.962131] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Created folder: Project (dee421c661394f3fbf8d69a575f095a9) in parent group-v811283. [ 1164.962432] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Creating folder: Instances. Parent ref: group-v811569. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1164.962708] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f572c656-559d-456d-bdb6-3871e9b2df05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.974780] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Created folder: Instances in parent group-v811569. [ 1164.975042] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.975244] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1164.975455] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-413d4563-148a-4cb9-9690-4fac1ff670f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.999580] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1164.999580] env[69927]: value = "task-4096688" [ 1164.999580] env[69927]: _type = "Task" [ 1164.999580] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.008367] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096688, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.068533] env[69927]: DEBUG nova.network.neutron [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1165.169187] env[69927]: DEBUG nova.network.neutron [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updated VIF entry in instance network info cache for port 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1165.170040] env[69927]: DEBUG nova.network.neutron [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.198735] env[69927]: INFO nova.compute.manager [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Unrescuing [ 1165.199052] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.199256] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.199486] env[69927]: DEBUG nova.network.neutron [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1165.213573] env[69927]: DEBUG nova.network.neutron [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 
tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Updating instance_info_cache with network_info: [{"id": "f232a8db-809f-43b2-ae66-27047a39c8bf", "address": "fa:16:3e:61:88:f3", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf232a8db-80", "ovs_interfaceid": "f232a8db-809f-43b2-ae66-27047a39c8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.296862] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a619a0a1-626a-44a8-aef3-31a160e9524d tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 32.758s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.299831] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 10.015s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.299831] env[69927]: INFO nova.compute.manager [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Unshelving [ 1165.322706] env[69927]: DEBUG nova.compute.manager [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Received event network-vif-plugged-f232a8db-809f-43b2-ae66-27047a39c8bf {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1165.322966] env[69927]: DEBUG oslo_concurrency.lockutils [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] Acquiring lock "6828dc80-2e0e-4715-a620-42edbe5eec2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.323678] env[69927]: DEBUG oslo_concurrency.lockutils [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 
req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.323877] env[69927]: DEBUG oslo_concurrency.lockutils [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.324216] env[69927]: DEBUG nova.compute.manager [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] No waiting events found dispatching network-vif-plugged-f232a8db-809f-43b2-ae66-27047a39c8bf {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1165.324522] env[69927]: WARNING nova.compute.manager [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Received unexpected event network-vif-plugged-f232a8db-809f-43b2-ae66-27047a39c8bf for instance with vm_state building and task_state spawning. [ 1165.324522] env[69927]: DEBUG nova.compute.manager [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Received event network-changed-f232a8db-809f-43b2-ae66-27047a39c8bf {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1165.324842] env[69927]: DEBUG nova.compute.manager [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Refreshing instance network info cache due to event network-changed-f232a8db-809f-43b2-ae66-27047a39c8bf. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1165.324842] env[69927]: DEBUG oslo_concurrency.lockutils [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] Acquiring lock "refresh_cache-6828dc80-2e0e-4715-a620-42edbe5eec2f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.396234] env[69927]: INFO nova.compute.manager [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Took 19.68 seconds to build instance. [ 1165.404123] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096685, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.518147] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096688, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.521460] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2136478-f6a3-4741-9c49-16e64ed07f62 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.531382] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa72c8dd-f4b1-4ae7-b1b9-df68f9ac4738 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.567324] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82c8930-b886-4765-a0cc-23ee53c37bff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.575105] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b6cbca-5c3e-4d14-9213-756ea9256ec7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.591395] env[69927]: DEBUG nova.compute.provider_tree [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.675041] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.675041] env[69927]: DEBUG nova.compute.manager [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Received event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1165.675041] env[69927]: DEBUG nova.compute.manager [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing instance network info cache due to event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1165.675041] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Acquiring lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.675041] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Acquired lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.675322] env[69927]: DEBUG nova.network.neutron [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing network info cache for port c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1165.716559] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "refresh_cache-6828dc80-2e0e-4715-a620-42edbe5eec2f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.716911] env[69927]: DEBUG nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Instance network_info: |[{"id": "f232a8db-809f-43b2-ae66-27047a39c8bf", "address": "fa:16:3e:61:88:f3", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf232a8db-80", "ovs_interfaceid": "f232a8db-809f-43b2-ae66-27047a39c8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1165.717245] env[69927]: DEBUG oslo_concurrency.lockutils [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] Acquired lock "refresh_cache-6828dc80-2e0e-4715-a620-42edbe5eec2f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.717447] env[69927]: DEBUG nova.network.neutron [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 
6828dc80-2e0e-4715-a620-42edbe5eec2f] Refreshing network info cache for port f232a8db-809f-43b2-ae66-27047a39c8bf {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1165.718794] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:88:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f232a8db-809f-43b2-ae66-27047a39c8bf', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1165.728158] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1165.731450] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1165.732018] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb71b30f-e11c-441a-99c8-e6fe7fbc8705 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.753968] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1165.753968] env[69927]: value = "task-4096689" [ 1165.753968] env[69927]: _type = "Task" [ 1165.753968] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.764286] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096689, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.901631] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7df8007-f7ff-4bb4-bc34-753af7ec13ca tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.196s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.901997] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096685, 'name': ReconfigVM_Task, 'duration_secs': 0.954473} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.902187] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Reconfigured VM instance instance-00000065 to attach disk [datastore1] ba7989a1-a644-4eb7-bf65-20ca1810dd62/ba7989a1-a644-4eb7-bf65-20ca1810dd62.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1165.902832] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d370987-b9c7-4c61-813e-ebf49cc34280 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.910498] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1165.910498] env[69927]: value = "task-4096690" [ 1165.910498] env[69927]: _type = "Task" [ 1165.910498] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.921139] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096690, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.014285] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096688, 'name': CreateVM_Task, 'duration_secs': 0.647403} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.014460] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1166.015383] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.015550] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.015946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1166.019850] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-445c6975-c80d-4bfe-a9e4-0a010dee0b2d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.026126] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1166.026126] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e0b8dc-2901-2ca8-4677-b299145738f5" [ 1166.026126] env[69927]: _type = "Task" [ 1166.026126] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.031262] env[69927]: DEBUG nova.network.neutron [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Updated VIF entry in instance network info cache for port f232a8db-809f-43b2-ae66-27047a39c8bf. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1166.031807] env[69927]: DEBUG nova.network.neutron [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Updating instance_info_cache with network_info: [{"id": "f232a8db-809f-43b2-ae66-27047a39c8bf", "address": "fa:16:3e:61:88:f3", "network": {"id": "625b5644-8a8f-4789-8c96-083982c1bf4c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-124597975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61b1aea0ccf049c8942ba32932412497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf232a8db-80", "ovs_interfaceid": "f232a8db-809f-43b2-ae66-27047a39c8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.041211] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e0b8dc-2901-2ca8-4677-b299145738f5, 'name': SearchDatastore_Task, 'duration_secs': 0.011626} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.041525] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.041752] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1166.041981] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.044019] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.044019] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1166.044019] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ba0fbf0-fb0a-456a-a7c5-11e51ee351b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.053363] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1166.053600] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1166.054290] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-763600a5-4893-487d-a417-91a3963dc166 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.060491] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1166.060491] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52570f4e-1839-b056-73eb-88a07cfdcbaf" [ 1166.060491] env[69927]: _type = "Task" [ 1166.060491] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.070880] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52570f4e-1839-b056-73eb-88a07cfdcbaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.095237] env[69927]: DEBUG nova.scheduler.client.report [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1166.204823] env[69927]: DEBUG nova.network.neutron [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Updating instance_info_cache with network_info: [{"id": "30a26167-3dd4-4729-be64-03c251eaaa48", "address": "fa:16:3e:0e:18:44", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a26167-3d", "ovs_interfaceid": "30a26167-3dd4-4729-be64-03c251eaaa48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.265191] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096689, 'name': CreateVM_Task, 'duration_secs': 0.344069} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.265392] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1166.265973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.266154] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.266459] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1166.266709] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d828dae-5a49-474a-9fd3-e7c179c88205 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.271773] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1166.271773] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52988a4d-0def-6b3c-5c73-062d2a43cfec" [ 1166.271773] env[69927]: _type = "Task" [ 1166.271773] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.281075] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52988a4d-0def-6b3c-5c73-062d2a43cfec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.322846] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.421381] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096690, 'name': Rename_Task, 'duration_secs': 0.201728} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.421693] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.421947] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d6cceac-b22e-448d-9cb1-4757a3c80a4d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.425252] env[69927]: DEBUG nova.network.neutron [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updated VIF entry in instance network info cache for port c1c89675-9c86-4cf9-9c34-fdea74b6cf04. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1166.425252] env[69927]: DEBUG nova.network.neutron [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.432342] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1166.432342] env[69927]: value = "task-4096691" [ 1166.432342] env[69927]: _type = "Task" [ 1166.432342] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.440868] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.519459] env[69927]: DEBUG nova.compute.manager [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.519716] env[69927]: DEBUG nova.compute.manager [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing instance network info cache due to event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1166.519987] env[69927]: DEBUG oslo_concurrency.lockutils [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.520274] env[69927]: DEBUG oslo_concurrency.lockutils [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.520483] env[69927]: DEBUG nova.network.neutron [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1166.535641] env[69927]: DEBUG oslo_concurrency.lockutils [req-43ad815c-8af8-4e31-84ca-61fe32d57c05 req-cd98d54b-3dd3-4f77-904b-ca94292bc4aa service nova] Releasing lock "refresh_cache-6828dc80-2e0e-4715-a620-42edbe5eec2f" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.576120] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52570f4e-1839-b056-73eb-88a07cfdcbaf, 'name': SearchDatastore_Task, 'duration_secs': 0.01233} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.576976] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd0536c8-12c8-4722-9ddb-b19ca597e36b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.585770] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1166.585770] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d569b2-47ae-906e-8793-15e3da9e227d" [ 1166.585770] env[69927]: _type = "Task" [ 1166.585770] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.596116] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d569b2-47ae-906e-8793-15e3da9e227d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.601143] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.603687] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.815s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.603941] env[69927]: DEBUG nova.objects.instance [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lazy-loading 'resources' on Instance uuid 406828cc-c6aa-4686-827d-c7c8e28ffb8e {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.633850] env[69927]: INFO nova.scheduler.client.report [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted allocations for instance 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1 [ 1166.709707] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-15c44d86-829f-4317-ab66-9e61d4fb4dd0" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.709707] env[69927]: DEBUG nova.objects.instance [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lazy-loading 'flavor' on Instance uuid 15c44d86-829f-4317-ab66-9e61d4fb4dd0 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.782244] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52988a4d-0def-6b3c-5c73-062d2a43cfec, 'name': SearchDatastore_Task, 'duration_secs': 0.013761} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.782571] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.782819] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1166.783045] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.927867] env[69927]: DEBUG oslo_concurrency.lockutils [req-65b73bdd-21f1-44c2-8c78-60decf1ef6fd req-51576505-7029-4845-b7ea-2fac57cba1ab service nova] Releasing lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.944970] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096691, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.097343] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d569b2-47ae-906e-8793-15e3da9e227d, 'name': SearchDatastore_Task, 'duration_secs': 0.015275} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.097343] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.097343] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] da468d11-82a4-4fec-b06a-1b522bacdbc2/da468d11-82a4-4fec-b06a-1b522bacdbc2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1167.097615] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.097812] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1167.098070] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82535f70-cfb8-45f6-a5dc-feaf86bf3e9c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.103427] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06ddfdd1-9415-46ba-8a8b-1de4983027c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.115025] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1167.115025] env[69927]: value = "task-4096692" [ 1167.115025] env[69927]: _type = "Task" [ 1167.115025] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.120934] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1167.125020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1167.125020] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5674b929-89ed-4b53-9a95-046c9d808df4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.132398] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.141395] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1167.141395] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c527fd-886a-dcae-2479-a756f92b49ca" [ 1167.141395] env[69927]: _type = "Task" [ 1167.141395] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.158318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-03f93578-8fce-4835-a77c-b849a84f0efe tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "2c0c2704-1ccb-4e1f-95e9-62e44b751cc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.237s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.165802] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c527fd-886a-dcae-2479-a756f92b49ca, 'name': SearchDatastore_Task, 'duration_secs': 0.020318} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.167041] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5809077e-f634-4422-a240-6f7da8dab070 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.181281] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1167.181281] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52881448-5789-fbcf-3b74-82dc48a117d2" [ 1167.181281] env[69927]: _type = "Task" [ 1167.181281] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.192879] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52881448-5789-fbcf-3b74-82dc48a117d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.216063] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ded718-db10-4160-8e72-fddd9183b425 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.242069] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.245531] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb71b0b3-8c2e-4827-ace7-d00c77e3130a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.255133] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1167.255133] env[69927]: value = "task-4096693" [ 1167.255133] env[69927]: _type = "Task" [ 1167.255133] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.267944] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.324373] env[69927]: DEBUG nova.network.neutron [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updated VIF entry in instance network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1167.324764] env[69927]: DEBUG nova.network.neutron [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.458629] env[69927]: INFO nova.compute.manager [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Rebuilding instance [ 1167.462242] env[69927]: DEBUG oslo_vmware.api [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096691, 'name': PowerOnVM_Task, 'duration_secs': 0.841689} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.466531] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1167.467171] env[69927]: INFO nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Took 8.87 seconds to spawn the instance on the hypervisor. 
[ 1167.467171] env[69927]: DEBUG nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1167.468745] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995fa7c4-f5ef-4899-b85f-d863540c1205 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.511739] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd56f0c-6bcf-4f5d-ae3b-df9a758a52a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.523258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.523258] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.528845] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ec0818-117b-47b2-9b0d-ed8d3b0f9f3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.536696] env[69927]: DEBUG nova.compute.manager [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1167.538633] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4805cf-b650-406b-939e-4f68b480d366 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.569627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d63c94c-305a-4c0e-8ab3-06814084c074 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.584250] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3033534b-0829-44b0-8287-3cd38c2aab6d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.602326] env[69927]: DEBUG nova.compute.provider_tree [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Inventory has not changed in ProviderTree for provider: 
2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.623349] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096692, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.695436] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52881448-5789-fbcf-3b74-82dc48a117d2, 'name': SearchDatastore_Task, 'duration_secs': 0.014644} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.695744] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.696026] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 6828dc80-2e0e-4715-a620-42edbe5eec2f/6828dc80-2e0e-4715-a620-42edbe5eec2f.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1167.696331] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4e1fb70-8e28-46f4-b7b0-071ec31eff58 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.705226] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1167.705226] env[69927]: value = "task-4096694" [ 1167.705226] env[69927]: _type = "Task" [ 1167.705226] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.714943] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.765828] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096693, 'name': PowerOffVM_Task, 'duration_secs': 0.253321} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.766187] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.771583] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Reconfiguring VM instance instance-0000005e to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1167.771903] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c46894e4-65e2-46b1-8558-35acb4eb0f49 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.792360] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1167.792360] env[69927]: value = "task-4096695" [ 1167.792360] env[69927]: _type = "Task" [ 1167.792360] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.802196] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096695, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.827843] env[69927]: DEBUG oslo_concurrency.lockutils [req-cae94e93-24f7-4916-a538-af6219c7b281 req-d500b62c-5a7e-48b0-bd87-77872446b32c service nova] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.997294] env[69927]: INFO nova.compute.manager [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Took 22.06 seconds to build instance. [ 1168.028582] env[69927]: DEBUG nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1168.106019] env[69927]: DEBUG nova.scheduler.client.report [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1168.123015] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096692, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.216034] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.303111] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096695, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.492168] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "56aec5c2-d344-4a8d-a55a-930bc425150a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.492417] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.500223] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ba321465-0cca-4e4f-a418-424b183c1a03 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.603s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.555783] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.582674] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.582989] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a63faf3-3329-4596-996c-06b74f8827fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.592571] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1168.592571] env[69927]: value = "task-4096696" [ 1168.592571] env[69927]: _type = "Task" [ 1168.592571] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.602486] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096696, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.614673] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.617398] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.396s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.617599] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.619626] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.297s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.619858] env[69927]: DEBUG nova.objects.instance [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lazy-loading 'pci_requests' on Instance uuid cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.631213] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096692, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.635948] env[69927]: INFO nova.scheduler.client.report [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Deleted allocations for instance 406828cc-c6aa-4686-827d-c7c8e28ffb8e [ 1168.640856] env[69927]: INFO nova.scheduler.client.report [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted allocations for instance 4b7934f8-2c97-480b-8af7-f09f6819e2b6 [ 1168.717510] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096694, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.735762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.735973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.736290] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.736596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.736888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.739291] env[69927]: INFO nova.compute.manager [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Terminating instance [ 1168.805441] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096695, 'name': ReconfigVM_Task, 'duration_secs': 0.517375} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.805899] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Reconfigured VM instance instance-0000005e to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1168.806180] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1168.806566] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8801d3d7-6b2b-4098-9b2a-cc0f8d846078 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.815226] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1168.815226] env[69927]: value = "task-4096697" [ 1168.815226] env[69927]: _type = "Task" [ 1168.815226] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.827502] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096697, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.997238] env[69927]: DEBUG nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1169.106069] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096696, 'name': PowerOffVM_Task, 'duration_secs': 0.206093} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.106498] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.107260] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.107559] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ca7f2d7-6326-4ee1-aca9-33ad76b7f0af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.116431] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1169.116431] env[69927]: value = "task-4096698" [ 1169.116431] env[69927]: _type = "Task" [ 1169.116431] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.120602] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.124524] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.131722] env[69927]: DEBUG nova.objects.instance [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lazy-loading 'numa_topology' on Instance uuid cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.138135] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1169.138396] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1169.138659] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811540', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'name': 'volume-110b2a96-6541-4296-9d43-a5c1d0562ba9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3', 'attached_at': '', 'detached_at': '', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'serial': '110b2a96-6541-4296-9d43-a5c1d0562ba9'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1169.142749] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c337b735-a845-4f1c-88eb-1dae2da8f96b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.150962] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096692, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.67582} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.151478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-43c26a9e-b36d-4664-9ff1-502a2db23643 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "4b7934f8-2c97-480b-8af7-f09f6819e2b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.781s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.153382] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d6b02302-b3cf-4791-9d61-64eba5a43eac tempest-ServersWithSpecificFlavorTestJSON-1780415271 tempest-ServersWithSpecificFlavorTestJSON-1780415271-project-member] Lock "406828cc-c6aa-4686-827d-c7c8e28ffb8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.487s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.154917] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] da468d11-82a4-4fec-b06a-1b522bacdbc2/da468d11-82a4-4fec-b06a-1b522bacdbc2.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1169.155174] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1169.156038] env[69927]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe48a320-4ccb-4ad5-bba5-4530b537c583 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.176357] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3110251e-97eb-46a0-9aac-1374c8c5db70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.181393] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1169.181393] env[69927]: value = "task-4096699" [ 1169.181393] env[69927]: _type = "Task" [ 1169.181393] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.188974] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419d2e1b-d88e-4f4b-bc53-364763d2d327 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.195554] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096699, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.219898] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749a6329-cf07-48bd-acb4-2d5896ca303e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.231560] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096694, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.244711] env[69927]: DEBUG nova.compute.manager [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1169.244937] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1169.245339] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] The volume has not been displaced from its original location: [datastore1] volume-110b2a96-6541-4296-9d43-a5c1d0562ba9/volume-110b2a96-6541-4296-9d43-a5c1d0562ba9.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1169.251292] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1169.252393] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bfc852-cd9b-4cfe-a373-8cb907dceca1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.255916] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeb1d777-0ab6-4242-bb17-4ddbae360f30 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.277231] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.278833] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3bd6978-1898-49f5-9765-321c6b6c1e93 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.280681] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1169.280681] env[69927]: value = "task-4096700" [ 1169.280681] env[69927]: _type = "Task" [ 1169.280681] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.290088] env[69927]: DEBUG oslo_vmware.api [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1169.290088] env[69927]: value = "task-4096701" [ 1169.290088] env[69927]: _type = "Task" [ 1169.290088] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.294159] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096700, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.305303] env[69927]: DEBUG oslo_vmware.api [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096701, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.324669] env[69927]: DEBUG oslo_vmware.api [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096697, 'name': PowerOnVM_Task, 'duration_secs': 0.453658} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.324985] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1169.325286] env[69927]: DEBUG nova.compute.manager [None req-9fd6e3f7-e7d6-4b00-a546-0f53245417dc tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1169.326115] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee845f0-c16b-4e8c-b7a8-d6cd5238d173 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.525590] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.636181] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.636406] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.636795] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.636795] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.636965] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.637079] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.637127] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1169.637263] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.639562] env[69927]: INFO nova.compute.claims [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1169.694298] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096699, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109879} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.694740] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1169.695921] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c4aa8c-c863-4883-b24b-3d77b1bc71dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.723454] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] da468d11-82a4-4fec-b06a-1b522bacdbc2/da468d11-82a4-4fec-b06a-1b522bacdbc2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1169.724168] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8b206fc-dbf7-48c3-8543-7385446ee2ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.747478] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096694, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.596232} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.748821] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 6828dc80-2e0e-4715-a620-42edbe5eec2f/6828dc80-2e0e-4715-a620-42edbe5eec2f.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1169.749123] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1169.749465] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1169.749465] env[69927]: value = "task-4096702" [ 1169.749465] env[69927]: _type = "Task" [ 1169.749465] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.749669] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1155e60c-faba-4969-8d97-70d9393a1252 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.761192] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096702, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.763088] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1169.763088] env[69927]: value = "task-4096703" [ 1169.763088] env[69927]: _type = "Task" [ 1169.763088] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.773624] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096703, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.792644] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096700, 'name': ReconfigVM_Task, 'duration_secs': 0.256147} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.793384] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1169.798424] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abb153e0-b2a4-48a1-8e0a-cd7ed2f8aeb9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.818666] env[69927]: DEBUG oslo_vmware.api [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096701, 'name': PowerOffVM_Task, 'duration_secs': 0.275208} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.820158] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.820437] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1169.820683] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1169.820683] env[69927]: value = "task-4096704" [ 1169.820683] env[69927]: _type = "Task" [ 1169.820683] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.821045] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11b38a0f-3a9e-47e4-8a29-a7ec5329f699 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.832150] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096704, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.913433] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.913694] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.913925] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleting the datastore file [datastore1] ba7989a1-a644-4eb7-bf65-20ca1810dd62 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.914187] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fa94bbc-59b8-4cbd-aba9-f877f4c9171b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.921608] env[69927]: DEBUG oslo_vmware.api [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1169.921608] env[69927]: value = "task-4096706" [ 1169.921608] env[69927]: _type = "Task" [ 1169.921608] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.935363] env[69927]: DEBUG oslo_vmware.api [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096706, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.143661] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.261925] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.277476] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073817} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.277759] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1170.278926] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e80997-66a9-493e-8f8e-c41cc4535888 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.305844] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 6828dc80-2e0e-4715-a620-42edbe5eec2f/6828dc80-2e0e-4715-a620-42edbe5eec2f.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.307099] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d0c6d30-e245-4c8d-9ce8-221be92582e4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.327485] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1170.327485] env[69927]: value = "task-4096707" [ 1170.327485] env[69927]: _type = "Task" [ 1170.327485] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.335307] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096704, 'name': ReconfigVM_Task, 'duration_secs': 0.27869} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.336118] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811540', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'name': 'volume-110b2a96-6541-4296-9d43-a5c1d0562ba9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3', 'attached_at': '', 'detached_at': '', 'volume_id': '110b2a96-6541-4296-9d43-a5c1d0562ba9', 'serial': '110b2a96-6541-4296-9d43-a5c1d0562ba9'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1170.336460] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1170.337289] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c71b210-c322-4f83-8f07-c1c4aa2d5c4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.345638] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096707, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.351351] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1170.351946] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f4830b5-fcce-4533-8048-b88300878d47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.433332] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1170.433581] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1170.433764] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Deleting the datastore file [datastore1] a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1170.434057] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9ee7163-1869-4d06-9616-7fc14f434ad0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.441036] env[69927]: DEBUG oslo_vmware.api [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096706, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.501195} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.442289] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.442960] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.443582] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1170.444609] env[69927]: INFO nova.compute.manager [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1170.444786] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1170.445075] env[69927]: DEBUG nova.compute.manager [-] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1170.445183] env[69927]: DEBUG nova.network.neutron [-] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1170.448674] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for the task: (returnval){ [ 1170.448674] env[69927]: value = "task-4096709" [ 1170.448674] env[69927]: _type = "Task" [ 1170.448674] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.460703] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096709, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.569831] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.569831] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.761791] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096702, 'name': ReconfigVM_Task, 'duration_secs': 0.775836} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.764494] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Reconfigured VM instance instance-00000066 to attach disk [datastore2] da468d11-82a4-4fec-b06a-1b522bacdbc2/da468d11-82a4-4fec-b06a-1b522bacdbc2.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1170.765298] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd815b06-2c20-4607-8572-3c5dbaa15dd7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.772817] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1170.772817] env[69927]: value = "task-4096710" [ 1170.772817] env[69927]: _type = "Task" [ 1170.772817] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.784692] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096710, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.840642] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096707, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.884491] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5164c2b0-ff48-43e0-9e68-f1156489d95c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.894736] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2923dcb2-574d-47e0-ade6-283453d0ae47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.934804] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6061a3fe-77e9-438e-af9c-3f1cc1ea1079 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.938498] env[69927]: DEBUG nova.compute.manager [req-bb16900e-0b75-4010-8065-5589417e6306 req-66326b45-147c-4bcc-a24d-e9620fd7b803 service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Received event network-vif-deleted-6252a6ad-27a0-47fd-8f00-60c71ed01985 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.938707] env[69927]: INFO nova.compute.manager [req-bb16900e-0b75-4010-8065-5589417e6306 req-66326b45-147c-4bcc-a24d-e9620fd7b803 service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Neutron deleted interface 6252a6ad-27a0-47fd-8f00-60c71ed01985; detaching it from the instance and deleting it from the info cache [ 1170.938891] env[69927]: DEBUG nova.network.neutron [req-bb16900e-0b75-4010-8065-5589417e6306 req-66326b45-147c-4bcc-a24d-e9620fd7b803 service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.948561] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de54d1d5-1829-44cd-bc5e-149fb5ccc70b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.970127] env[69927]: DEBUG oslo_vmware.api [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Task: {'id': task-4096709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096279} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.970610] env[69927]: DEBUG nova.compute.provider_tree [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1170.971992] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.973243] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.973243] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1171.035895] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1171.036261] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd074ab9-e752-4248-ba66-5892becc9e92 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.047790] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddab120-969d-49e1-bfa2-82b3f35568f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.072772] env[69927]: DEBUG nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1171.089667] env[69927]: ERROR nova.compute.manager [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Failed to detach volume 110b2a96-6541-4296-9d43-a5c1d0562ba9 from /dev/sda: nova.exception.InstanceNotFound: Instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 could not be found. 
[ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Traceback (most recent call last): [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self.driver.rebuild(**kwargs) [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] raise NotImplementedError() [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] NotImplementedError [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] During handling of the above exception, another exception occurred: [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Traceback (most recent call last): [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self.driver.detach_volume(context, old_connection_info, [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] return self._volumeops.detach_volume(connection_info, instance) [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self._detach_volume_vmdk(connection_info, instance) [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] stable_ref.fetch_moref(session) [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] 
nova.exception.InstanceNotFound: Instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 could not be found. [ 1171.089667] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.242615] env[69927]: DEBUG nova.compute.utils [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Build of instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 aborted: Failed to rebuild volume backed instance. {{(pid=69927) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1171.246445] env[69927]: ERROR nova.compute.manager [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 aborted: Failed to rebuild volume backed instance. [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Traceback (most recent call last): [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self.driver.rebuild(**kwargs) [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] raise NotImplementedError() [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] NotImplementedError [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] During handling of the above exception, another exception occurred: [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Traceback (most recent call last): [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self._detach_root_volume(context, instance, root_bdm) [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] with excutils.save_and_reraise_exception(): [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self.force_reraise() [ 1171.246445] env[69927]: ERROR nova.compute.manager 
[instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] raise self.value [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self.driver.detach_volume(context, old_connection_info, [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] return self._volumeops.detach_volume(connection_info, instance) [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self._detach_volume_vmdk(connection_info, instance) [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] stable_ref.fetch_moref(session) [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] nova.exception.InstanceNotFound: Instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 could not be found. 
[ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] During handling of the above exception, another exception occurred: [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Traceback (most recent call last): [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] yield [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1171.246445] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self._do_rebuild_instance_with_claim( [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self._do_rebuild_instance( [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self._rebuild_default_impl(**kwargs) [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] self._rebuild_volume_backed_instance( [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] raise exception.BuildAbortException( [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] nova.exception.BuildAbortException: Build of instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 aborted: Failed to rebuild volume backed instance. [ 1171.247869] env[69927]: ERROR nova.compute.manager [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] [ 1171.289447] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096710, 'name': Rename_Task, 'duration_secs': 0.270818} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.289896] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1171.290911] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-299d3f79-8bb8-4821-a784-f0602b504cfa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.301459] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1171.301459] env[69927]: value = "task-4096711" [ 1171.301459] env[69927]: _type = "Task" [ 1171.301459] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.312826] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096711, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.340910] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096707, 'name': ReconfigVM_Task, 'duration_secs': 0.53408} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.341153] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 6828dc80-2e0e-4715-a620-42edbe5eec2f/6828dc80-2e0e-4715-a620-42edbe5eec2f.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1171.341802] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0d88fd1-ddc9-4201-8d72-f90803207a7f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.351051] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1171.351051] env[69927]: value = "task-4096712" [ 1171.351051] env[69927]: _type = "Task" [ 1171.351051] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.362474] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096712, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.403039] env[69927]: DEBUG nova.network.neutron [-] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.444055] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-406f4c9b-074c-4beb-9bd1-12d96dbdb93b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.455712] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626a2d6d-463d-42a8-b931-edfe96f97fb1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.475563] env[69927]: DEBUG nova.scheduler.client.report [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1171.492054] env[69927]: DEBUG nova.compute.manager [req-bb16900e-0b75-4010-8065-5589417e6306 req-66326b45-147c-4bcc-a24d-e9620fd7b803 service nova] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Detach interface failed, port_id=6252a6ad-27a0-47fd-8f00-60c71ed01985, reason: Instance ba7989a1-a644-4eb7-bf65-20ca1810dd62 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1171.618568] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.815210] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096711, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.861706] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096712, 'name': Rename_Task, 'duration_secs': 0.168476} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.862165] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1171.862463] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ce5af25-f242-48fd-b237-f9cd13b482ce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.870723] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1171.870723] env[69927]: value = "task-4096713" [ 1171.870723] env[69927]: _type = "Task" [ 1171.870723] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.880365] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.907526] env[69927]: INFO nova.compute.manager [-] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Took 1.46 seconds to deallocate network for instance. [ 1171.981752] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.362s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.985101] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.429s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.987390] env[69927]: INFO nova.compute.claims [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1172.031410] env[69927]: INFO nova.network.neutron [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating port eb2105ba-0276-4bc6-a2af-933090d4cdcd with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1172.314346] env[69927]: DEBUG oslo_vmware.api [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096711, 'name': PowerOnVM_Task, 'duration_secs': 
0.658448} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.316079] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1172.316079] env[69927]: INFO nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Took 11.21 seconds to spawn the instance on the hypervisor. [ 1172.316079] env[69927]: DEBUG nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1172.316079] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec2fe43-ab3d-4040-82d7-8e29521f99ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.385570] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096713, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.414614] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.835307] env[69927]: INFO nova.compute.manager [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Took 25.44 seconds to build instance. [ 1172.885577] env[69927]: DEBUG oslo_vmware.api [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096713, 'name': PowerOnVM_Task, 'duration_secs': 0.688327} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.885975] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1172.886168] env[69927]: INFO nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Took 8.69 seconds to spawn the instance on the hypervisor. 
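The repeated "Task: {'id': ..., 'name': ...} progress is N%." and "... completed successfully." records above come from oslo.vmware polling each vCenter task until it reaches a terminal state. The fragment below is only a minimal, self-contained sketch of that poll-until-terminal loop; get_task_info() is a hypothetical stand-in for the SOAP call oslo.vmware actually issues, so just the control flow is representative, not the real API.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        """Poll a task until it succeeds or fails, mirroring the log output above."""
        while True:
            info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 14}
            if info['state'] == 'running':
                print("Task %s progress is %d%%." % (task_id, info.get('progress', 0)))
            elif info['state'] == 'success':
                print("Task %s completed successfully." % task_id)
                return info.get('result')
            else:  # 'error' or 'cancelled'
                raise TaskFailed(info.get('error', 'task did not complete'))
            time.sleep(poll_interval)

A power-on such as task-4096711 above follows exactly this shape: a few "progress" polls, then a single "completed successfully" record carrying the task duration.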
[ 1172.886606] env[69927]: DEBUG nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1172.887292] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d8bbd5-4862-447b-b15b-1167b21a1c07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.259740] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.278176] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc60537-c591-4b59-a87c-0ee28dd9a8bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.286468] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5fd25c-c5f8-4057-829e-c4a62395d401 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.319595] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316decc7-4ba4-4467-af83-410f2e7ce224 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.328299] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e44dc7-7d6c-4ae2-b9e0-031c880d80a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.346704] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c4ea1f78-ef1d-4239-aac5-36f58c607499 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.964s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.347294] env[69927]: DEBUG nova.compute.provider_tree [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.410121] env[69927]: INFO nova.compute.manager [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Took 25.14 seconds to build instance. 
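The "Inventory has not changed in ProviderTree for provider: 2f529b36-..." record just above, and the full inventory dumps at 1171.475563 and 1173.851123, describe this compute node's resource provider. Placement derives usable capacity per resource class roughly as (total - reserved) * allocation_ratio, with min_unit, max_unit and step_size bounding any single allocation; a small worked example over the values copied from the log:

    # Inventory values copied from the "Inventory has not changed ..." records.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Capacity formula assumed here: (total - reserved) * allocation_ratio.
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print('%s: %d allocatable' % (rc, capacity))

    # Prints: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400.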
[ 1173.590178] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.590505] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.643554] env[69927]: DEBUG nova.compute.manager [req-48348c50-ab27-433c-9040-424b3f2955b8 req-a1c53ea5-71ce-46bb-a196-bef350faa4e5 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received event network-vif-plugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1173.643554] env[69927]: DEBUG oslo_concurrency.lockutils [req-48348c50-ab27-433c-9040-424b3f2955b8 req-a1c53ea5-71ce-46bb-a196-bef350faa4e5 service nova] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.643554] env[69927]: DEBUG oslo_concurrency.lockutils [req-48348c50-ab27-433c-9040-424b3f2955b8 req-a1c53ea5-71ce-46bb-a196-bef350faa4e5 service nova] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.643554] env[69927]: DEBUG oslo_concurrency.lockutils [req-48348c50-ab27-433c-9040-424b3f2955b8 req-a1c53ea5-71ce-46bb-a196-bef350faa4e5 service nova] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.643702] env[69927]: DEBUG nova.compute.manager [req-48348c50-ab27-433c-9040-424b3f2955b8 req-a1c53ea5-71ce-46bb-a196-bef350faa4e5 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] No waiting events found dispatching network-vif-plugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1173.643738] env[69927]: WARNING nova.compute.manager [req-48348c50-ab27-433c-9040-424b3f2955b8 req-a1c53ea5-71ce-46bb-a196-bef350faa4e5 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received unexpected event network-vif-plugged-eb2105ba-0276-4bc6-a2af-933090d4cdcd for instance with vm_state shelved_offloaded and task_state spawning. 
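The "Acquiring lock ... by ...", "acquired ... :: waited 0.000s" and "released ... :: held N.NNNs" records throughout this trace are emitted by oslo.concurrency's lock wrapper, which Nova uses to serialize work such as the per-instance build lock and the "compute_resources" lock seen above. A minimal sketch of that usage, with a hypothetical function and example values standing in for Nova's own code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage_example(instance_uuid, vcpus, memory_mb):
        # Only one caller in this process holds the 'compute_resources' lock
        # at a time; the waited/held times in the log measure contention on
        # exactly this kind of wrapper.
        print('updating usage for %s: %d vCPU, %d MB' % (instance_uuid, vcpus, memory_mb))

    update_usage_example('da468d11-82a4-4fec-b06a-1b522bacdbc2', 1, 512)

    # The same named lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        print('critical section')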
[ 1173.851123] env[69927]: DEBUG nova.scheduler.client.report [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1173.891237] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.891540] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.891730] env[69927]: DEBUG nova.network.neutron [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1173.893035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquiring lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.893255] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.893459] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquiring lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.893641] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.893806] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.895841] env[69927]: INFO nova.compute.manager [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Terminating instance [ 1173.914891] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d3ddb4a1-c821-4aac-98e2-c6393aa6eed9 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.655s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.092567] env[69927]: DEBUG nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1174.358783] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.358783] env[69927]: DEBUG nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1174.365326] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.838s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.365702] env[69927]: INFO nova.compute.claims [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1174.404228] env[69927]: DEBUG nova.compute.manager [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1174.404637] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49cffc24-2733-4f95-bf35-8f04394bc61e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.418489] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22773233-46b7-4369-809a-01e663d87b5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.467788] env[69927]: WARNING nova.virt.vmwareapi.driver [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 could not be found. 
[ 1174.468145] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1174.468982] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b33e4457-bbac-425e-addf-cf6c5615fde2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.481714] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2916c75b-4b5d-4b09-a69a-3be68be8b7c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.501267] env[69927]: DEBUG nova.compute.manager [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Received event network-changed-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.503039] env[69927]: DEBUG nova.compute.manager [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Refreshing instance network info cache due to event network-changed-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1174.503039] env[69927]: DEBUG oslo_concurrency.lockutils [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] Acquiring lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.503039] env[69927]: DEBUG oslo_concurrency.lockutils [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.503039] env[69927]: DEBUG nova.network.neutron [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Refreshing network info cache for port 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1174.524910] env[69927]: WARNING nova.virt.vmwareapi.vmops [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3 could not be found. 
[ 1174.524910] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1174.525085] env[69927]: INFO nova.compute.manager [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Took 0.12 seconds to destroy the instance on the hypervisor. [ 1174.525354] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1174.528633] env[69927]: DEBUG nova.compute.manager [-] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1174.528737] env[69927]: DEBUG nova.network.neutron [-] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1174.621638] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.681111] env[69927]: DEBUG nova.network.neutron [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2105ba-02", "ovs_interfaceid": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1174.872329] env[69927]: DEBUG nova.compute.utils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1174.876484] env[69927]: DEBUG nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1174.877826] env[69927]: DEBUG nova.network.neutron [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1174.943842] env[69927]: DEBUG nova.policy [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1175.185331] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.223722] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a64010b24fe1ab608cda2c22a7d74d08',container_format='bare',created_at=2025-05-13T19:44:45Z,direct_url=,disk_format='vmdk',id=67c6308c-617a-40e4-b8e7-aa77c737c6ac,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-915633514-shelved',owner='ef85ff9fc3d240a8a24b6cea8dda0f6f',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-05-13T19:45:01Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1175.223978] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Flavor limits 0:0:0 
{{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1175.224831] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1175.224831] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1175.224831] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1175.224831] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1175.225184] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1175.225297] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1175.225516] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1175.225723] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1175.225939] env[69927]: DEBUG nova.virt.hardware [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1175.226967] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ce1eda-04f0-4ae4-87c7-5214c6cac1b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.237233] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ed931c95-5a45-4c21-8993-3e27934ced0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.257983] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:0c:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb2105ba-0276-4bc6-a2af-933090d4cdcd', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1175.266026] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1175.267025] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1175.267025] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecda6761-9d2c-4e61-851b-f70d3c18a146 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.291183] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1175.291183] env[69927]: value = "task-4096714" [ 1175.291183] env[69927]: _type = "Task" [ 1175.291183] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.301111] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096714, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.378045] env[69927]: DEBUG nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1175.403155] env[69927]: DEBUG nova.network.neutron [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updated VIF entry in instance network info cache for port 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1175.403740] env[69927]: DEBUG nova.network.neutron [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.519278] env[69927]: DEBUG nova.network.neutron [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Successfully created port: a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1175.651167] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "6828dc80-2e0e-4715-a620-42edbe5eec2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.654636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.654636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "6828dc80-2e0e-4715-a620-42edbe5eec2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.654636] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.654636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.656844] env[69927]: INFO nova.compute.manager [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Terminating instance [ 1175.695196] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c007279-f66a-482c-bb00-a1a2afa83723 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.707088] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c70bdd-d5be-43ee-8470-b3713557cb0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.744114] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ea860f-56a5-47e8-a711-d302eb1cae54 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.754175] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19151c6e-ad33-4799-85c8-012be7f922d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.785822] env[69927]: DEBUG nova.compute.provider_tree [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.803075] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096714, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.872233] env[69927]: DEBUG nova.compute.manager [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received event network-changed-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.872398] env[69927]: DEBUG nova.compute.manager [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Refreshing instance network info cache due to event network-changed-eb2105ba-0276-4bc6-a2af-933090d4cdcd. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1175.872785] env[69927]: DEBUG oslo_concurrency.lockutils [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] Acquiring lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.872785] env[69927]: DEBUG oslo_concurrency.lockutils [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] Acquired lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.872886] env[69927]: DEBUG nova.network.neutron [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Refreshing network info cache for port eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1175.906476] env[69927]: DEBUG oslo_concurrency.lockutils [req-fd86321d-104a-4099-8a6e-ef75aec831cb req-f632637a-2606-47a8-8876-2ded325af4c0 service nova] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.162321] env[69927]: DEBUG nova.compute.manager [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1176.162846] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.163698] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91e80f1-bb4c-4070-b520-4c6f412c481a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.172672] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.172973] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a991c0a1-36d2-476e-8d09-ba81db882b69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.181121] env[69927]: DEBUG oslo_vmware.api [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1176.181121] env[69927]: value = "task-4096715" [ 1176.181121] env[69927]: _type = "Task" [ 1176.181121] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.184483] env[69927]: DEBUG nova.network.neutron [-] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.191303] env[69927]: DEBUG oslo_vmware.api [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096715, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.289098] env[69927]: DEBUG nova.scheduler.client.report [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1176.304343] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096714, 'name': CreateVM_Task, 'duration_secs': 0.520498} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.304678] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1176.305516] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.305941] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.306111] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1176.306425] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b287f47-725d-4263-88b2-587c9562b8e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.314255] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 
tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1176.314255] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d9e8f6-5d64-4c33-ba8a-0ae202031a56" [ 1176.314255] env[69927]: _type = "Task" [ 1176.314255] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.324985] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d9e8f6-5d64-4c33-ba8a-0ae202031a56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.392213] env[69927]: DEBUG nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1176.429234] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1176.429772] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.430018] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1176.430237] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.430418] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1176.430687] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1176.431099] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1176.431352] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1176.431545] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1176.431716] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1176.432041] env[69927]: DEBUG nova.virt.hardware [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1176.433971] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6143b4b9-989c-4685-8e5f-180c6b934bb1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.444225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d614c7-61a4-4997-91a5-bed8e4ed1878 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.689408] env[69927]: INFO nova.compute.manager [-] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Took 2.16 seconds to deallocate network for instance. [ 1176.695406] env[69927]: DEBUG oslo_vmware.api [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096715, 'name': PowerOffVM_Task, 'duration_secs': 0.362282} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.696950] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.697419] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.698648] env[69927]: DEBUG nova.compute.manager [req-a45bb7d9-a991-41ca-a9f8-3cf915606ee2 req-c8836bf9-53f7-445c-a042-a9449602121a service nova] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Received event network-vif-deleted-358ca610-8c4b-4e27-99df-97c37c69074f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1176.699333] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c6f2edd-9636-4c73-88b1-451ee2f7b13b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.771024] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.771024] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.771024] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleting the datastore file [datastore2] 6828dc80-2e0e-4715-a620-42edbe5eec2f {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.771024] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28a44cf2-f0f5-407b-b60a-8b851888ee66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.780367] env[69927]: DEBUG oslo_vmware.api [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for the task: (returnval){ [ 1176.780367] env[69927]: value = "task-4096717" [ 1176.780367] env[69927]: _type = "Task" [ 1176.780367] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.789704] env[69927]: DEBUG oslo_vmware.api [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.798434] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.803581] env[69927]: DEBUG nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1176.805213] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.662s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.805788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.806116] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1176.807014] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.189s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.808727] env[69927]: INFO nova.compute.claims [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1176.812608] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15c7db4-299b-486c-9ae5-a8724ed1c86b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.836219] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b280a3-be31-46a2-9b48-e7d45dd1ec95 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.840596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.841145] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Processing image 67c6308c-617a-40e4-b8e7-aa77c737c6ac {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1176.841562] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac/67c6308c-617a-40e4-b8e7-aa77c737c6ac.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.841824] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac/67c6308c-617a-40e4-b8e7-aa77c737c6ac.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.842120] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1176.844167] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-389e72dd-3219-4be8-8a77-7d388b1d2a27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.863191] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c720b2-2a4c-46aa-9e8a-85c1cd090186 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.870369] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1176.870596] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1176.875115] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a3bb5fd-0dac-48a7-b8cb-f2f77ebb4f07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.878741] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bb276f-c005-4fb7-8ef7-de718c53889b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.887022] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1176.887022] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529e6a62-ec74-0b3d-1587-8f9943953f1e" [ 1176.887022] env[69927]: _type = "Task" [ 1176.887022] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.917134] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179625MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1176.917134] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.924379] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Preparing fetch location {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1176.924579] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Fetch image to [datastore1] OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a/OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a.vmdk {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1176.924840] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Downloading stream optimized image 67c6308c-617a-40e4-b8e7-aa77c737c6ac to [datastore1] OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a/OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a.vmdk on the data store datastore1 as vApp {{(pid=69927) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1176.924985] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 
cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Downloading image file data 67c6308c-617a-40e4-b8e7-aa77c737c6ac to the ESX as VM named 'OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a' {{(pid=69927) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1176.947870] env[69927]: DEBUG nova.network.neutron [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updated VIF entry in instance network info cache for port eb2105ba-0276-4bc6-a2af-933090d4cdcd. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1176.948617] env[69927]: DEBUG nova.network.neutron [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2105ba-02", "ovs_interfaceid": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.027720] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1177.027720] env[69927]: value = "resgroup-9" [ 1177.027720] env[69927]: _type = "ResourcePool" [ 1177.027720] env[69927]: }. 
{{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1177.028289] env[69927]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3bb1ecfd-7e0c-4d8f-bf7a-0a125a3348c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.051141] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lease: (returnval){ [ 1177.051141] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c17945-387a-d695-cb8a-3bfb3b6a64f4" [ 1177.051141] env[69927]: _type = "HttpNfcLease" [ 1177.051141] env[69927]: } obtained for vApp import into resource pool (val){ [ 1177.051141] env[69927]: value = "resgroup-9" [ 1177.051141] env[69927]: _type = "ResourcePool" [ 1177.051141] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1177.051481] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the lease: (returnval){ [ 1177.051481] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c17945-387a-d695-cb8a-3bfb3b6a64f4" [ 1177.051481] env[69927]: _type = "HttpNfcLease" [ 1177.051481] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1177.061628] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1177.061628] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c17945-387a-d695-cb8a-3bfb3b6a64f4" [ 1177.061628] env[69927]: _type = "HttpNfcLease" [ 1177.061628] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1177.248237] env[69927]: INFO nova.compute.manager [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Took 0.56 seconds to detach 1 volumes for instance. [ 1177.250721] env[69927]: DEBUG nova.compute.manager [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Deleting volume: 110b2a96-6541-4296-9d43-a5c1d0562ba9 {{(pid=69927) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1177.293412] env[69927]: DEBUG oslo_vmware.api [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Task: {'id': task-4096717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167804} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.293831] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1177.294712] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1177.294926] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1177.295142] env[69927]: INFO nova.compute.manager [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1177.295412] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.295656] env[69927]: DEBUG nova.compute.manager [-] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1177.295773] env[69927]: DEBUG nova.network.neutron [-] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.318070] env[69927]: DEBUG nova.compute.utils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1177.318070] env[69927]: DEBUG nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1177.318070] env[69927]: DEBUG nova.network.neutron [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1177.327027] env[69927]: DEBUG nova.network.neutron [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Successfully updated port: a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1177.417342] env[69927]: DEBUG nova.policy [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd89d0a2232b4da1a0b88799062fe8da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3afde63c8cbe4aecb32a470fd6b948f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1177.455071] env[69927]: DEBUG oslo_concurrency.lockutils [req-5d82649d-59ed-46b5-85d8-d15606361b6e req-3e9c3713-065c-47e7-a896-0e422fcdca39 service nova] Releasing lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.561857] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1177.561857] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c17945-387a-d695-cb8a-3bfb3b6a64f4" [ 1177.561857] env[69927]: _type = "HttpNfcLease" [ 1177.561857] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1177.818806] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.826621] env[69927]: DEBUG nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1177.832787] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.833475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.834041] env[69927]: DEBUG nova.network.neutron [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1177.841147] env[69927]: DEBUG nova.network.neutron [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Successfully created port: 75ee960c-41d4-4858-8b1e-8198b77eb0d7 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1177.981640] env[69927]: DEBUG nova.compute.manager [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-vif-plugged-a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.981792] env[69927]: DEBUG oslo_concurrency.lockutils [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] Acquiring lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.982208] env[69927]: DEBUG oslo_concurrency.lockutils [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.982814] env[69927]: DEBUG oslo_concurrency.lockutils [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.983281] env[69927]: DEBUG nova.compute.manager [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] No waiting events found dispatching network-vif-plugged-a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:321}} [ 1177.983281] env[69927]: WARNING nova.compute.manager [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received unexpected event network-vif-plugged-a54251d6-cc17-4c26-95aa-a11a00c08e5e for instance with vm_state building and task_state spawning. [ 1177.983464] env[69927]: DEBUG nova.compute.manager [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.983771] env[69927]: DEBUG nova.compute.manager [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing instance network info cache due to event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1177.983980] env[69927]: DEBUG oslo_concurrency.lockutils [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.064510] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1178.064510] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c17945-387a-d695-cb8a-3bfb3b6a64f4" [ 1178.064510] env[69927]: _type = "HttpNfcLease" [ 1178.064510] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1178.064810] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1178.064810] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c17945-387a-d695-cb8a-3bfb3b6a64f4" [ 1178.064810] env[69927]: _type = "HttpNfcLease" [ 1178.064810] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1178.066481] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de476c5-3a01-4059-abea-fb79484cdedd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.080148] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52579524-56b1-118e-c30b-14e9a517a2a0/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1178.080370] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52579524-56b1-118e-c30b-14e9a517a2a0/disk-0.vmdk. 
{{(pid=69927) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1178.146025] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-598a469b-0b91-4def-be52-12a6635acb66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.163018] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66aed64-6f79-4974-ae18-c1e61c5f7cb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.163654] env[69927]: DEBUG nova.network.neutron [-] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.170641] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31401b8-f7d2-490b-a9cc-ad5e4a030e06 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.211250] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a8b20f-cf48-403d-9e87-930366190df6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.222117] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ecbe50-1f3c-495b-93ad-dddbdbf1e6bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.240510] env[69927]: DEBUG nova.compute.provider_tree [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.380889] env[69927]: DEBUG nova.network.neutron [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1178.582047] env[69927]: DEBUG nova.network.neutron [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.670222] env[69927]: INFO nova.compute.manager [-] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Took 1.37 seconds to deallocate network for instance. [ 1178.742712] env[69927]: DEBUG nova.scheduler.client.report [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1178.748722] env[69927]: DEBUG nova.compute.manager [req-6ccb81bc-459c-4360-811d-60390b5a6465 req-12353cee-0ce9-4722-a308-6827d887f431 service nova] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Received event network-vif-deleted-f232a8db-809f-43b2-ae66-27047a39c8bf {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1178.844822] env[69927]: DEBUG nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1178.878088] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.878088] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.878313] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.878392] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.879048] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.879048] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.879048] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.879351] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.879351] env[69927]: DEBUG nova.virt.hardware [None 
req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.879462] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.879671] env[69927]: DEBUG nova.virt.hardware [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.880678] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25837552-c066-45d1-a952-cb91fef4003f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.896608] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7384c35-54e2-440d-8ab9-ddfc5cd9c089 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.085751] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.085751] env[69927]: DEBUG nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Instance network_info: |[{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1179.088539] env[69927]: DEBUG oslo_concurrency.lockutils [req-ff15676a-b6db-46c7-b44f-9d862333ce54 
req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.088839] env[69927]: DEBUG nova.network.neutron [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1179.090361] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:62:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a54251d6-cc17-4c26-95aa-a11a00c08e5e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1179.099682] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1179.102840] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1179.103898] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e2d98f6-2c58-45cc-bbb3-622c480b6b9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.130366] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1179.130366] env[69927]: value = "task-4096720" [ 1179.130366] env[69927]: _type = "Task" [ 1179.130366] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.143861] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096720, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.179840] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.253857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.254647] env[69927]: DEBUG nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1179.258039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.844s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.258352] env[69927]: DEBUG nova.objects.instance [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'resources' on Instance uuid ba7989a1-a644-4eb7-bf65-20ca1810dd62 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.458386] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Completed reading data from the image iterator. {{(pid=69927) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1179.458642] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52579524-56b1-118e-c30b-14e9a517a2a0/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1179.459571] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317037b7-45c9-4370-9250-1de0a60b98ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.467303] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52579524-56b1-118e-c30b-14e9a517a2a0/disk-0.vmdk is in state: ready. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1179.467406] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52579524-56b1-118e-c30b-14e9a517a2a0/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1179.467580] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1ea837fe-ec18-41e5-9fcd-2f3706a50d99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.643283] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096720, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.662128] env[69927]: DEBUG nova.network.neutron [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Successfully updated port: 75ee960c-41d4-4858-8b1e-8198b77eb0d7 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1179.714258] env[69927]: DEBUG oslo_vmware.rw_handles [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52579524-56b1-118e-c30b-14e9a517a2a0/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1179.714462] env[69927]: INFO nova.virt.vmwareapi.images [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Downloaded image file data 67c6308c-617a-40e4-b8e7-aa77c737c6ac [ 1179.718034] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb6a162-6411-47d4-828f-eb44b0371d47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.734452] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5662a89-76a7-42cc-b14b-826867dca635 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.761880] env[69927]: DEBUG nova.compute.utils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1179.764451] env[69927]: INFO nova.virt.vmwareapi.images [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] The imported VM was unregistered [ 1179.767218] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: 
cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Caching image {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1179.767471] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Creating directory with path [datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1179.770930] env[69927]: DEBUG nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1179.771303] env[69927]: DEBUG nova.network.neutron [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1179.776346] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a261bbd9-4295-4b66-87ff-7eed30205b37 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.793108] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Created directory with path [datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1179.793297] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a/OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a.vmdk to [datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac/67c6308c-617a-40e4-b8e7-aa77c737c6ac.vmdk. {{(pid=69927) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1179.793563] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e5e10412-0990-4d28-8d2a-eafd5d32d60b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.804687] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1179.804687] env[69927]: value = "task-4096722" [ 1179.804687] env[69927]: _type = "Task" [ 1179.804687] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.813897] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096722, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.824224] env[69927]: DEBUG nova.policy [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16c1e562693c466c8786016a777f9f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cf6bb3492c642aa9a168e484299289c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1180.018436] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631b9f5a-2b3d-428e-a66d-1a1338071bed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.036115] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd19dae-36f5-415d-a2fe-67b4c0855939 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.042441] env[69927]: DEBUG nova.compute.manager [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Received event network-vif-plugged-75ee960c-41d4-4858-8b1e-8198b77eb0d7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1180.042755] env[69927]: DEBUG oslo_concurrency.lockutils [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] Acquiring lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.043051] env[69927]: DEBUG oslo_concurrency.lockutils [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.043311] env[69927]: DEBUG oslo_concurrency.lockutils [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.043492] env[69927]: DEBUG nova.compute.manager [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] No waiting events found dispatching network-vif-plugged-75ee960c-41d4-4858-8b1e-8198b77eb0d7 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1180.043732] env[69927]: WARNING nova.compute.manager [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Received 
unexpected event network-vif-plugged-75ee960c-41d4-4858-8b1e-8198b77eb0d7 for instance with vm_state building and task_state spawning. [ 1180.043946] env[69927]: DEBUG nova.compute.manager [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Received event network-changed-75ee960c-41d4-4858-8b1e-8198b77eb0d7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1180.044176] env[69927]: DEBUG nova.compute.manager [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Refreshing instance network info cache due to event network-changed-75ee960c-41d4-4858-8b1e-8198b77eb0d7. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1180.044528] env[69927]: DEBUG oslo_concurrency.lockutils [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] Acquiring lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.044833] env[69927]: DEBUG oslo_concurrency.lockutils [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] Acquired lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.044972] env[69927]: DEBUG nova.network.neutron [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Refreshing network info cache for port 75ee960c-41d4-4858-8b1e-8198b77eb0d7 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.091086] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5a0c0b-94be-440a-9217-3aeecc02017d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.105589] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85471208-02fb-468d-b077-f52ad445d45c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.127988] env[69927]: DEBUG nova.compute.provider_tree [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.147254] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096720, 'name': CreateVM_Task, 'duration_secs': 0.614581} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.147358] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1180.151119] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.151119] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.151119] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1180.151119] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84d2d463-e933-452e-9ae4-5c91e4a392df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.161477] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1180.161477] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522eab1d-2d7c-09a6-02db-f5b55df03322" [ 1180.161477] env[69927]: _type = "Task" [ 1180.161477] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.165409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.173770] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522eab1d-2d7c-09a6-02db-f5b55df03322, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.174906] env[69927]: DEBUG nova.network.neutron [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Successfully created port: 3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1180.253328] env[69927]: DEBUG nova.network.neutron [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updated VIF entry in instance network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1180.253671] env[69927]: DEBUG nova.network.neutron [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.272108] env[69927]: DEBUG nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1180.318892] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096722, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.616609] env[69927]: DEBUG nova.network.neutron [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1180.632217] env[69927]: DEBUG nova.scheduler.client.report [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.679284] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522eab1d-2d7c-09a6-02db-f5b55df03322, 'name': SearchDatastore_Task, 'duration_secs': 0.092116} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.682135] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.682135] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1180.682135] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.682135] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.682135] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1180.682135] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5381f4f-bf98-4a12-be97-ba856d25e2f7 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.699643] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1180.699841] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1180.700661] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c205660-e0db-49ab-b857-d09962b01f4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.710663] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1180.710663] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5219c623-5b1a-c777-33e5-305767e31ecd" [ 1180.710663] env[69927]: _type = "Task" [ 1180.710663] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.714511] env[69927]: DEBUG nova.network.neutron [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.721143] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5219c623-5b1a-c777-33e5-305767e31ecd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.760656] env[69927]: DEBUG oslo_concurrency.lockutils [req-ff15676a-b6db-46c7-b44f-9d862333ce54 req-643e98a3-ba69-46a5-af57-5dd2a318d0de service nova] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.821773] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096722, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.141133] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.882s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.143761] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.883s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.169350] env[69927]: INFO nova.scheduler.client.report [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted allocations for instance ba7989a1-a644-4eb7-bf65-20ca1810dd62 [ 1181.217609] env[69927]: DEBUG oslo_concurrency.lockutils [req-7037a4d6-f12f-483f-ace8-8b51315bb622 req-d79458fb-d5b2-4369-95f6-a45c3663d0fe service nova] Releasing lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.218754] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.218896] env[69927]: DEBUG nova.network.neutron [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.224342] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5219c623-5b1a-c777-33e5-305767e31ecd, 'name': SearchDatastore_Task, 'duration_secs': 0.091659} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.229270] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee901f93-71f8-490d-838b-7278765c056f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.238136] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1181.238136] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fee065-2114-c576-e0e8-077e01c226c1" [ 1181.238136] env[69927]: _type = "Task" [ 1181.238136] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.250236] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fee065-2114-c576-e0e8-077e01c226c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.283838] env[69927]: DEBUG nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1181.312344] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1181.312629] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1181.312873] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1181.313102] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1181.313258] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1181.313420] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1181.313664] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1181.313829] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1181.313996] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1181.314172] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1181.314346] env[69927]: DEBUG nova.virt.hardware [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1181.315271] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd1c624-5764-4ff6-8913-09e8751e7f67 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.332568] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096722, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.334172] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028bc78d-9a02-4f75-8c85-f509c46117dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.409331] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bb905d-04b7-4e7d-91e0-f28d4f49d75e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.417870] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b94b18f-0330-4f02-b8b4-e22c3fa9f9e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.451860] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f7535c-5fee-4fd8-aad9-2509d1ad3ada {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.460452] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dfbf06-0309-4ab7-956b-603acc07f4ef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.475398] env[69927]: DEBUG nova.compute.provider_tree [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.681742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-392a721d-ddfa-493a-b240-ba889dd0466f tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "ba7989a1-a644-4eb7-bf65-20ca1810dd62" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.946s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.749883] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fee065-2114-c576-e0e8-077e01c226c1, 'name': SearchDatastore_Task, 'duration_secs': 0.092369} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.750649] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.750649] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 5c87c74d-5998-4dfc-bc3c-c2887ff25195/5c87c74d-5998-4dfc-bc3c-c2887ff25195.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1181.750843] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38442390-d129-4511-8a3e-a830aa8f0262 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.759782] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1181.759782] env[69927]: value = "task-4096723" [ 1181.759782] env[69927]: _type = "Task" [ 1181.759782] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.769450] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.793346] env[69927]: DEBUG nova.network.neutron [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1181.818138] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096722, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.830141] env[69927]: DEBUG nova.network.neutron [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Successfully updated port: 3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1181.911068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.913681] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.978411] env[69927]: DEBUG nova.scheduler.client.report [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.054943] env[69927]: DEBUG nova.network.neutron [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance_info_cache with network_info: [{"id": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "address": "fa:16:3e:60:97:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ee960c-41", "ovs_interfaceid": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.271048] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.307084] env[69927]: DEBUG nova.compute.manager [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Received event network-vif-plugged-3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1182.307193] env[69927]: DEBUG oslo_concurrency.lockutils [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.307399] env[69927]: DEBUG oslo_concurrency.lockutils [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.307604] env[69927]: DEBUG oslo_concurrency.lockutils [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.308014] env[69927]: DEBUG nova.compute.manager [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] No waiting events found dispatching network-vif-plugged-3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1182.308014] env[69927]: WARNING nova.compute.manager [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Received unexpected event network-vif-plugged-3e85edce-fa8a-45d4-b109-5bdd98a06303 for instance with vm_state building and task_state spawning. [ 1182.308251] env[69927]: DEBUG nova.compute.manager [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Received event network-changed-3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1182.308304] env[69927]: DEBUG nova.compute.manager [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Refreshing instance network info cache due to event network-changed-3e85edce-fa8a-45d4-b109-5bdd98a06303. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1182.309043] env[69927]: DEBUG oslo_concurrency.lockutils [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] Acquiring lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.309043] env[69927]: DEBUG oslo_concurrency.lockutils [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] Acquired lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.309794] env[69927]: DEBUG nova.network.neutron [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Refreshing network info cache for port 3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1182.322505] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096722, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.514223} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.323177] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a/OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a.vmdk to [datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac/67c6308c-617a-40e4-b8e7-aa77c737c6ac.vmdk. 
[ 1182.323177] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Cleaning up location [datastore1] OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1182.323177] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_d972d6e5-403f-4b8e-9bf2-d8bd5f5d121a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1182.323463] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-101c5881-b1d6-458a-8bcc-83c082dc5447 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.332616] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.334420] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1182.334420] env[69927]: value = "task-4096724" [ 1182.334420] env[69927]: _type = "Task" [ 1182.334420] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.344252] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096724, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.416513] env[69927]: DEBUG nova.compute.utils [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.483628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.341s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.483889] env[69927]: INFO nova.compute.manager [None req-b80a8915-996b-4369-94d2-efda9d5e230d tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Successfully reverted task state from rebuilding on failure for instance. 
[ 1182.489884] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.868s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.491422] env[69927]: INFO nova.compute.claims [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1182.559636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.559636] env[69927]: DEBUG nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Instance network_info: |[{"id": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "address": "fa:16:3e:60:97:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ee960c-41", "ovs_interfaceid": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1182.559636] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:97:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75ee960c-41d4-4858-8b1e-8198b77eb0d7', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1182.568845] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 
tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1182.569217] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1182.569363] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf52f087-3e2f-4896-8b43-854988fc8e75 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.592093] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1182.592093] env[69927]: value = "task-4096725" [ 1182.592093] env[69927]: _type = "Task" [ 1182.592093] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.601994] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096725, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.771481] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096723, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.95417} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.771753] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 5c87c74d-5998-4dfc-bc3c-c2887ff25195/5c87c74d-5998-4dfc-bc3c-c2887ff25195.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1182.771974] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1182.772237] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16012348-db0a-4590-8a0b-e3679a91635a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.779536] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1182.779536] env[69927]: value = "task-4096726" [ 1182.779536] env[69927]: _type = "Task" [ 1182.779536] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.788416] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096726, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.846322] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04244} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.846594] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1182.846788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac/67c6308c-617a-40e4-b8e7-aa77c737c6ac.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.847055] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac/67c6308c-617a-40e4-b8e7-aa77c737c6ac.vmdk to [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1182.847425] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d890c66f-3ab3-47b3-9d99-b3ec8285490e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.850738] env[69927]: DEBUG nova.network.neutron [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1182.858204] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1182.858204] env[69927]: value = "task-4096727" [ 1182.858204] env[69927]: _type = "Task" [ 1182.858204] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.867856] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096727, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.906922] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "07814f60-1886-4b06-bcf7-e2c9b95a4501" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.906922] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.919915] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.970945] env[69927]: DEBUG nova.network.neutron [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.103738] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096725, 'name': CreateVM_Task, 'duration_secs': 0.445159} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.103935] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1183.104681] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.104881] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.105241] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1183.105516] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b87e475e-c1a9-4293-97df-0103a7238fbc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.111530] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1183.111530] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521b439a-58e2-d9a7-e95a-28366a0d1464" [ 1183.111530] env[69927]: _type = "Task" [ 1183.111530] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.123320] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521b439a-58e2-d9a7-e95a-28366a0d1464, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.290138] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096726, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07336} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.290438] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1183.291223] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c6af5c-83b1-462f-9016-601c92eb9881 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.314566] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 5c87c74d-5998-4dfc-bc3c-c2887ff25195/5c87c74d-5998-4dfc-bc3c-c2887ff25195.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1183.314933] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60ec651a-a2a3-4bd4-9fd3-0482eabd3c6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.336658] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1183.336658] env[69927]: value = "task-4096728" [ 1183.336658] env[69927]: _type = "Task" [ 1183.336658] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.345308] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096728, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.369738] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096727, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.410267] env[69927]: DEBUG nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1183.475663] env[69927]: DEBUG oslo_concurrency.lockutils [req-d7e2a550-6ad2-4051-ad61-8a12dc958a65 req-276b53ff-639f-4916-ba1a-b01279a2f760 service nova] Releasing lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.476077] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.476503] env[69927]: DEBUG nova.network.neutron [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1183.625663] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521b439a-58e2-d9a7-e95a-28366a0d1464, 'name': SearchDatastore_Task, 'duration_secs': 0.057574} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.626079] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.626331] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1183.626598] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.626752] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.626934] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Creating directory 
with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.627241] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-678b3886-6f85-4c6e-aae3-248e281ce6a7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.647127] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1183.647344] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1183.648145] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0b949a9-d7ff-40df-807c-3bc59526d359 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.658269] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1183.658269] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52de921b-eaaf-65cf-f881-039c09273a5d" [ 1183.658269] env[69927]: _type = "Task" [ 1183.658269] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.668018] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52de921b-eaaf-65cf-f881-039c09273a5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.746657] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d932747-d41f-4e4f-8d13-2d7b2f06e126 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.755761] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae88fb49-7215-4677-a655-27cf7b4495db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.792334] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba27dc6-7f8f-4c2c-ab57-c9b072e1423e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.801396] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b4e7eb-f726-42c5-8259-c84f11728107 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.822828] env[69927]: DEBUG nova.compute.provider_tree [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.847915] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096728, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.869963] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096727, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.934057] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.007672] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.008118] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.009133] env[69927]: INFO nova.compute.manager [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Attaching volume ebc32a93-6e2f-4e7b-a036-5117cda5ebfd to /dev/sdb [ 1184.012380] env[69927]: DEBUG nova.network.neutron [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1184.051394] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94493c0f-8822-4653-b2a3-27c43438a500 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.059299] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df215c17-6c51-43f5-9d6e-be7daf345a21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.074519] env[69927]: DEBUG nova.virt.block_device [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Updating existing volume attachment record: c32cbeea-3b8b-4fa3-a963-d3fb1caa44c6 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1184.170078] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52de921b-eaaf-65cf-f881-039c09273a5d, 'name': SearchDatastore_Task, 'duration_secs': 0.092154} completed successfully. 
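
The lockutils lines come in a fixed three-step shape: "Acquiring lock X by Y", "Lock X acquired by Y :: waited N s", and later "Lock X released by Y :: held N s". A stdlib-only sketch that reproduces the same timing log around a threading.Lock (an illustration, not the oslo.concurrency code):

    import contextlib
    import threading
    import time

    _locks = {}

    @contextlib.contextmanager
    def logged_lock(name, caller):
        lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, caller))
        t0 = time.monotonic()
        with lock:
            print('Lock "%s" acquired by "%s" :: waited %.3fs'
                  % (name, caller, time.monotonic() - t0))
            t1 = time.monotonic()
            try:
                yield
            finally:
                print('Lock "%s" "released" by "%s" :: held %.3fs'
                      % (name, caller, time.monotonic() - t1))

    # usage, mirroring the refresh_cache lock in the log:
    with logged_lock("refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5",
                     "build network info cache"):
        pass  # work done while the cache is locked
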
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.170905] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-752f7b27-164a-4f01-b443-1702bd7f3fad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.177224] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1184.177224] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d41169-3440-20b8-1bbd-330c98226e57" [ 1184.177224] env[69927]: _type = "Task" [ 1184.177224] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.186554] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d41169-3440-20b8-1bbd-330c98226e57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.259206] env[69927]: DEBUG nova.network.neutron [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [{"id": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "address": "fa:16:3e:df:fd:25", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e85edce-fa", "ovs_interfaceid": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.328024] env[69927]: DEBUG nova.scheduler.client.report [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.349289] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096728, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.370677] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096727, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.691055] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d41169-3440-20b8-1bbd-330c98226e57, 'name': SearchDatastore_Task, 'duration_secs': 0.094989} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.691233] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.691402] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 56aec5c2-d344-4a8d-a55a-930bc425150a/56aec5c2-d344-4a8d-a55a-930bc425150a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1184.691720] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4b2b07b-83cc-4710-af5b-629657052ae6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.700157] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1184.700157] env[69927]: value = "task-4096730" [ 1184.700157] env[69927]: _type = "Task" [ 1184.700157] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.709230] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096730, 'name': CopyVirtualDisk_Task} progress is 0%. 
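
The inventory reported above for provider 2f529b36-df5f-4b37-8103-68f74f737726 fixes each resource class's total, reserved amount and allocation_ratio; the capacity placement can schedule against is (total - reserved) * allocation_ratio. A quick check of that arithmetic with the figures from the report:

    # Inventory as logged for provider 2f529b36-df5f-4b37-8103-68f74f737726.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU      192.0   (48 physical vcpus with 4.0 overcommit)
    # MEMORY_MB 196078.0
    # DISK_GB   400.0
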
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.761958] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.761958] env[69927]: DEBUG nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Instance network_info: |[{"id": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "address": "fa:16:3e:df:fd:25", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e85edce-fa", "ovs_interfaceid": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1184.762434] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:fd:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cbd5e0e-9116-46f1-9748-13a73d2d7e75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e85edce-fa8a-45d4-b109-5bdd98a06303', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1184.772664] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
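
The vmops entry above reduces the full neutron network_info (port id, MAC, NSX logical switch id) to the small VIF-info dict the VMware driver needs: network_name 'br-int', the MAC address, an OpaqueNetwork reference, the iface_id and vif_model 'vmxnet3'. A simplified sketch of that translation, using only the values visible in the log; the helper name is ours, not Nova's:

    def vif_info_from_network_info(vif):
        """Collapse one neutron VIF dict into the fields the log shows."""
        return {
            'network_name': vif['network']['bridge'],          # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }

    vif = {
        'id': '3e85edce-fa8a-45d4-b109-5bdd98a06303',
        'address': 'fa:16:3e:df:fd:25',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': '1cbd5e0e-9116-46f1-9748-13a73d2d7e75'},
    }
    print(vif_info_from_network_info(vif))
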
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1184.773091] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1184.773401] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0c461a3-510a-442d-a883-3b9964ec4d52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.798018] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1184.798018] env[69927]: value = "task-4096731" [ 1184.798018] env[69927]: _type = "Task" [ 1184.798018] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.808465] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096731, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.831871] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.832531] env[69927]: DEBUG nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1184.835532] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.918s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.850094] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096728, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.871872] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096727, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.211024] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096730, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.307893] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096731, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.339710] env[69927]: DEBUG nova.compute.utils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1185.349068] env[69927]: DEBUG nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1185.349274] env[69927]: DEBUG nova.network.neutron [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1185.362318] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096728, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.374245] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096727, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.416148} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.374626] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/67c6308c-617a-40e4-b8e7-aa77c737c6ac/67c6308c-617a-40e4-b8e7-aa77c737c6ac.vmdk to [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1185.375580] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a166493c-9c0b-4527-9e36-cc0c010a8765 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.400957] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1185.402676] env[69927]: DEBUG nova.policy [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbeef6251f004acea30513e40de6c140', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de9e293e2d1a4e179f01f60e882851b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1185.404336] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88a75bea-d71b-46a1-8c32-c2e018b8ed3f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.426013] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1185.426013] env[69927]: value = "task-4096732" [ 1185.426013] env[69927]: _type = "Task" [ 1185.426013] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.442695] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096732, 'name': ReconfigVM_Task} progress is 5%. 
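
The nova.policy line records that network:attach_external_network was denied for a token carrying only the 'reader' and 'member' roles; that rule is admin-only in the default policy, so the build simply proceeds without external network access. A toy role check that reproduces the logged outcome for those credentials (not the oslo.policy engine, which evaluates the configured rule string):

    # Illustration only: the real decision is made by oslo.policy against the
    # configured rule for "network:attach_external_network".
    creds = {'roles': ['reader', 'member'], 'is_admin': False}

    def can_attach_external_network(creds):
        return creds.get('is_admin', False) or 'admin' in creds.get('roles', [])

    print(can_attach_external_network(creds))   # False -> "Policy check ... failed"
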
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.714406] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096730, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.761142] env[69927]: DEBUG nova.network.neutron [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Successfully created port: 037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1185.809425] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096731, 'name': CreateVM_Task, 'duration_secs': 0.694579} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.809859] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1185.810362] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.810532] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.810884] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1185.811152] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaa8b008-f9f2-451d-af82-a2fbd89d1e52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.816333] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1185.816333] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5295c70f-8906-ccd5-397e-330fadd31396" [ 1185.816333] env[69927]: _type = "Task" [ 1185.816333] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.824639] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5295c70f-8906-ccd5-397e-330fadd31396, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.849527] env[69927]: DEBUG nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1185.865895] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096728, 'name': ReconfigVM_Task, 'duration_secs': 2.08043} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.866340] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 5c87c74d-5998-4dfc-bc3c-c2887ff25195/5c87c74d-5998-4dfc-bc3c-c2887ff25195.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.867091] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8734fb7-9285-4a4a-abed-b7ea192127bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.877525] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1185.877525] env[69927]: value = "task-4096733" [ 1185.877525] env[69927]: _type = "Task" [ 1185.877525] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.884484] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885413] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 15c44d86-829f-4317-ab66-9e61d4fb4dd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885413] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 1b22fbb0-8628-4c69-b9b4-d6d294c7458b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885413] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cff307ed-3c8b-4126-9749-1204597cbf6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885413] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 693a6c6b-8d1c-405e-bb17-73259e28f556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885413] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7ff17f1d-31fd-440b-906c-2719770a9151 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885413] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance d548ea75-9c1f-4884-b338-194f1b5d62ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885821] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance da468d11-82a4-4fec-b06a-1b522bacdbc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.885873] env[69927]: WARNING nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 6828dc80-2e0e-4715-a620-42edbe5eec2f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1185.886104] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.886298] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 5c87c74d-5998-4dfc-bc3c-c2887ff25195 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.886530] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 56aec5c2-d344-4a8d-a55a-930bc425150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.886599] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.887179] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b007a697-7da4-4c97-9ccb-046d86b27568 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1185.895180] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096733, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.937020] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096732, 'name': ReconfigVM_Task, 'duration_secs': 0.493822} completed successfully. 
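
The burst of _remove_deleted_instances_allocations entries above walks every allocation placement holds against this node and sorts it into three cases: the instance is still actively managed here (keep the allocation), it has only been scheduled and not yet started (skip the heal), or nothing on this host knows about it (warn and leave it alone). A condensed sketch of that triage, using UUIDs taken from the log and hypothetical input sets:

    managed = {'b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a'}
    scheduled_not_started = {'07814f60-1886-4b06-bcf7-e2c9b95a4501'}
    allocations = {
        'b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
        '07814f60-1886-4b06-bcf7-e2c9b95a4501': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
        '6828dc80-2e0e-4715-a620-42edbe5eec2f': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
    }

    for uuid, resources in allocations.items():
        if uuid in managed:
            print('DEBUG: %s actively managed, keeping allocation %s' % (uuid, resources))
        elif uuid in scheduled_not_started:
            print('DEBUG: %s scheduled but not yet started, skipping heal' % uuid)
        else:
            print('WARNING: %s not managed here but has allocations; '
                  'skipping heal because we do not know what to do' % uuid)
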
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.937020] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Reconfigured VM instance instance-0000004f to attach disk [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a/cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.938095] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b41428d0-2d0a-4760-8a78-9e60efdf6ff9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.946114] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1185.946114] env[69927]: value = "task-4096734" [ 1185.946114] env[69927]: _type = "Task" [ 1185.946114] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.955774] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096734, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.213316] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096730, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.036788} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.213611] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 56aec5c2-d344-4a8d-a55a-930bc425150a/56aec5c2-d344-4a8d-a55a-930bc425150a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1186.213831] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1186.214110] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-69986a35-cc70-46a2-9b5f-9b3a62d3f534 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.221496] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1186.221496] env[69927]: value = "task-4096735" [ 1186.221496] env[69927]: _type = "Task" [ 1186.221496] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.231738] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096735, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.326806] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5295c70f-8906-ccd5-397e-330fadd31396, 'name': SearchDatastore_Task, 'duration_secs': 0.010526} completed successfully. 
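
The "Extending root virtual disk to 1048576" step sizes the copied image up to the flavor's root disk; for the m1.nano flavor used in this run (root_gb=1, visible further down in the log) that figure is simply 1 GiB expressed in KiB, which appears to be the unit the extend call takes. The arithmetic:

    root_gb = 1                         # m1.nano root_gb from the flavor in the log
    size_kb = root_gb * 1024 * 1024     # GiB -> KiB
    print(size_kb)                      # 1048576, the value in the ExtendVirtualDisk step
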
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.327165] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.327405] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1186.327641] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.327789] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.327984] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.328267] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6743c130-7f40-4838-89a4-a0aea44d5225 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.337919] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.338135] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1186.338901] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e10e230d-acfa-4722-b3ba-270bd25d2bb4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.346784] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1186.346784] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525643b1-b50c-4b88-1aaa-529965b51af3" [ 1186.346784] env[69927]: _type = "Task" [ 1186.346784] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.354009] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525643b1-b50c-4b88-1aaa-529965b51af3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.392102] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096733, 'name': Rename_Task, 'duration_secs': 0.183513} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.392102] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1186.392102] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fa7b456-ab02-49f3-917a-4adc56f8981a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.397346] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 07814f60-1886-4b06-bcf7-e2c9b95a4501 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1186.397346] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1186.397346] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1186.401470] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1186.401470] env[69927]: value = "task-4096737" [ 1186.401470] env[69927]: _type = "Task" [ 1186.401470] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.410833] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096737, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.463683] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096734, 'name': Rename_Task, 'duration_secs': 0.211999} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.466717] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1186.467600] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-320f07a9-9adb-4163-bc6d-9f6f017507bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.477661] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1186.477661] env[69927]: value = "task-4096738" [ 1186.477661] env[69927]: _type = "Task" [ 1186.477661] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.485463] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096738, 'name': PowerOnVM_Task} progress is 0%. 
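
The "Final resource view" numbers follow directly from the 13 allocated instances, each claiming 1 VCPU, 192 MB and 1 GB, plus the 512 MB the node reserves for itself. A quick check of the reported used_ram, used_disk and used_vcpus:

    instances = 13                       # "total allocated vcpus: 13"
    per_instance = {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}
    reserved_ram_mb = 512                # reserved MEMORY_MB from the inventory

    used_ram = reserved_ram_mb + instances * per_instance['MEMORY_MB']
    used_disk = instances * per_instance['DISK_GB']
    used_vcpus = instances * per_instance['VCPU']
    print(used_ram, used_disk, used_vcpus)   # 3008 13 13, matching the log line
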
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.650730] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c677b24-162a-4763-88a1-3e8a755c3052 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.658642] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6102db-0f00-4e6c-bac4-c0f0fc209a96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.692227] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af2bf3c-5519-457f-8cb4-4c3ba0b82253 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.700976] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791a5c1a-0329-44b8-acc8-aef612ef3412 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.716258] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.732616] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096735, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074411} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.732999] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1186.733828] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585171cd-61aa-4788-b09f-b0edbdd9b416 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.758018] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 56aec5c2-d344-4a8d-a55a-930bc425150a/56aec5c2-d344-4a8d-a55a-930bc425150a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.758369] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0749c57-2ac4-4901-97f1-5d6845d65633 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.780708] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1186.780708] env[69927]: value = "task-4096739" [ 1186.780708] env[69927]: _type = "Task" [ 1186.780708] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.792376] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096739, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.858073] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525643b1-b50c-4b88-1aaa-529965b51af3, 'name': SearchDatastore_Task, 'duration_secs': 0.0107} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.858968] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9478328-8201-4fea-a267-5746b5c7ad30 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.865924] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1186.865924] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525090f1-bd73-3cc8-9f88-82f0c1c9c4e3" [ 1186.865924] env[69927]: _type = "Task" [ 1186.865924] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.870920] env[69927]: DEBUG nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1186.881155] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525090f1-bd73-3cc8-9f88-82f0c1c9c4e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.903281] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1186.903591] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.903756] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1186.903926] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.904104] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1186.904265] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1186.904802] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1186.904984] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1186.905174] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1186.905339] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1186.905513] env[69927]: DEBUG nova.virt.hardware [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1186.906687] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72425473-8996-4609-8f16-48c15630a43d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.921985] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4812d55-ab7a-4ea2-b3d0-e7a08b36a83f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.926039] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096737, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.986966] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096738, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.220185] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1187.295108] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.378785] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525090f1-bd73-3cc8-9f88-82f0c1c9c4e3, 'name': SearchDatastore_Task, 'duration_secs': 0.021421} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.379862] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.380061] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1187.380357] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7fee20d-707d-4f1a-8dd3-b88f86e26a17 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.389799] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1187.389799] env[69927]: value = "task-4096740" [ 1187.389799] env[69927]: _type = "Task" [ 1187.389799] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.400366] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096740, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.418346] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096737, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.492553] env[69927]: DEBUG oslo_vmware.api [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096738, 'name': PowerOnVM_Task, 'duration_secs': 0.863476} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.492901] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1187.588253] env[69927]: DEBUG nova.compute.manager [req-94e9c7a7-a958-4adb-b616-5d52cff0d6af req-835ac288-79fe-4494-af53-8277e1b48c86 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Received event network-vif-plugged-037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1187.588526] env[69927]: DEBUG oslo_concurrency.lockutils [req-94e9c7a7-a958-4adb-b616-5d52cff0d6af req-835ac288-79fe-4494-af53-8277e1b48c86 service nova] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.588878] env[69927]: DEBUG oslo_concurrency.lockutils [req-94e9c7a7-a958-4adb-b616-5d52cff0d6af req-835ac288-79fe-4494-af53-8277e1b48c86 service nova] Lock "b007a697-7da4-4c97-9ccb-046d86b27568-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.589454] env[69927]: DEBUG oslo_concurrency.lockutils [req-94e9c7a7-a958-4adb-b616-5d52cff0d6af req-835ac288-79fe-4494-af53-8277e1b48c86 service nova] Lock "b007a697-7da4-4c97-9ccb-046d86b27568-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.589863] env[69927]: DEBUG nova.compute.manager [req-94e9c7a7-a958-4adb-b616-5d52cff0d6af req-835ac288-79fe-4494-af53-8277e1b48c86 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] No waiting events found dispatching network-vif-plugged-037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 
1187.590143] env[69927]: WARNING nova.compute.manager [req-94e9c7a7-a958-4adb-b616-5d52cff0d6af req-835ac288-79fe-4494-af53-8277e1b48c86 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Received unexpected event network-vif-plugged-037faf17-cf20-417c-ab4d-b0a08944b7d9 for instance with vm_state building and task_state spawning. [ 1187.646931] env[69927]: DEBUG nova.compute.manager [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1187.647913] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489133f5-6340-456c-b368-c6422d921dda {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.725649] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1187.726035] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.890s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.726683] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.908s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.726974] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.729800] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.550s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.730135] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.736039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.802s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.740559] env[69927]: INFO nova.compute.claims [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1187.771796] env[69927]: INFO nova.scheduler.client.report [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Deleted allocations for instance 6828dc80-2e0e-4715-a620-42edbe5eec2f [ 1187.797453] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096739, 'name': ReconfigVM_Task, 'duration_secs': 0.815534} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.797453] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 56aec5c2-d344-4a8d-a55a-930bc425150a/56aec5c2-d344-4a8d-a55a-930bc425150a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.797919] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36a2da93-6ab4-49e3-b292-3ffc6477804f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.808464] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1187.808464] env[69927]: value = "task-4096741" [ 1187.808464] env[69927]: _type = "Task" [ 1187.808464] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.808464] env[69927]: DEBUG nova.network.neutron [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Successfully updated port: 037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1187.824898] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096741, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.902246] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096740, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.916498] env[69927]: DEBUG oslo_vmware.api [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096737, 'name': PowerOnVM_Task, 'duration_secs': 1.025479} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.916956] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1187.917253] env[69927]: INFO nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Took 11.52 seconds to spawn the instance on the hypervisor. [ 1187.917535] env[69927]: DEBUG nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1187.918471] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9d3c71-11ab-4d86-95ce-1d2fedbefe5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.172314] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f483cd9b-1884-4b26-8ae2-145d7dd59472 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.874s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.267751] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e539d65d-0cc0-4e95-b6e7-94e327e1dbd7 tempest-ServerActionsV293TestJSON-1763504731 tempest-ServerActionsV293TestJSON-1763504731-project-member] Lock "a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.374s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.280750] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c20b8e6a-4485-4d96-aae9-615e6ad034a4 tempest-ServerDiskConfigTestJSON-714416826 tempest-ServerDiskConfigTestJSON-714416826-project-member] Lock "6828dc80-2e0e-4715-a620-42edbe5eec2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.629s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.320113] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.320413] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.320515] env[69927]: DEBUG nova.network.neutron [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.321790] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096741, 'name': Rename_Task, 'duration_secs': 0.308975} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.322298] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1188.322505] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-873a45dc-71ee-4c78-b83c-6dde858a4397 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.332810] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1188.332810] env[69927]: value = "task-4096742" [ 1188.332810] env[69927]: _type = "Task" [ 1188.332810] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.341585] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.401113] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096740, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657394} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.401456] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1188.401456] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1188.401805] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-907bff5a-bed7-4d40-a8e2-984197d83577 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.409477] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1188.409477] env[69927]: value = "task-4096743" [ 1188.409477] env[69927]: _type = "Task" [ 1188.409477] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.418715] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096743, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.440624] env[69927]: INFO nova.compute.manager [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Took 19.91 seconds to build instance. [ 1188.630160] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Volume attach. 
Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1188.630434] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811577', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'name': 'volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b22fbb0-8628-4c69-b9b4-d6d294c7458b', 'attached_at': '', 'detached_at': '', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'serial': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1188.631514] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d2dda3-55aa-4230-b289-43418f1890b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.649626] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555b93de-9faf-498a-8c36-87bcec930618 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.677908] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd/volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.678827] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ff563dd-9573-43e8-b535-c2ef3a123d6c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.700756] env[69927]: DEBUG oslo_vmware.api [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1188.700756] env[69927]: value = "task-4096744" [ 1188.700756] env[69927]: _type = "Task" [ 1188.700756] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.710214] env[69927]: DEBUG oslo_vmware.api [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096744, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.843061] env[69927]: DEBUG oslo_vmware.api [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096742, 'name': PowerOnVM_Task, 'duration_secs': 0.509586} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.845977] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1188.846238] env[69927]: INFO nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Took 10.00 seconds to spawn the instance on the hypervisor. [ 1188.846477] env[69927]: DEBUG nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1188.847609] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9262f7e4-3be4-42bf-9248-f168d4044c2d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.912589] env[69927]: DEBUG nova.network.neutron [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1188.925124] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097249} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.925400] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.926208] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a523beef-59b1-4e90-8991-caf5a7deb0d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.945608] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1ae0c731-1f49-4f43-bc8b-8b68519e4dff tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.424s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.956086] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.959276] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4fff36c-0028-470b-9c3d-b8a43b00cedd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.983407] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1188.983407] env[69927]: value = "task-4096745" [ 1188.983407] env[69927]: _type = "Task" [ 1188.983407] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.995650] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096745, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.033764] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18954b5d-59a6-435e-8e8a-65b8d032eb0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.043203] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbee0e8-7801-490a-8a51-34d0250633ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.082561] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1122d197-238b-4edc-bdf6-2ebba127f6d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.086520] env[69927]: DEBUG nova.compute.manager [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.086732] env[69927]: DEBUG nova.compute.manager [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing instance network info cache due to event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1189.086976] env[69927]: DEBUG oslo_concurrency.lockutils [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.087220] env[69927]: DEBUG oslo_concurrency.lockutils [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.087362] env[69927]: DEBUG nova.network.neutron [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1189.095870] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27c5990-1308-4e2e-8a59-28839076c087 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.114141] env[69927]: DEBUG nova.compute.provider_tree [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.211161] env[69927]: DEBUG oslo_vmware.api [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096744, 
'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.254869] env[69927]: DEBUG nova.network.neutron [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.367436] env[69927]: INFO nova.compute.manager [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Took 19.86 seconds to build instance. [ 1189.497106] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.616821] env[69927]: DEBUG nova.scheduler.client.report [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.714387] env[69927]: DEBUG oslo_vmware.api [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096744, 'name': ReconfigVM_Task, 'duration_secs': 0.919796} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.714804] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd/volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.720289] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b091cb20-35b3-42a9-9dec-7a371d4abfb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.736688] env[69927]: DEBUG oslo_vmware.api [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1189.736688] env[69927]: value = "task-4096746" [ 1189.736688] env[69927]: _type = "Task" [ 1189.736688] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.747927] env[69927]: DEBUG oslo_vmware.api [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096746, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.758694] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.759092] env[69927]: DEBUG nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Instance network_info: |[{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1189.759539] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:4b:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '037faf17-cf20-417c-ab4d-b0a08944b7d9', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1189.770085] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1189.771063] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1189.771416] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14100378-2a35-4e4e-aa58-b68a4640c273 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.796843] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1189.796843] env[69927]: value = "task-4096747" [ 1189.796843] env[69927]: _type = "Task" [ 1189.796843] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.805310] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096747, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.870154] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8bec8f14-c0ff-49d6-ba70-6c90d4468776 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.377s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.962682] env[69927]: DEBUG nova.compute.manager [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Received event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.963062] env[69927]: DEBUG nova.compute.manager [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing instance network info cache due to event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1189.965280] env[69927]: DEBUG oslo_concurrency.lockutils [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] Acquiring lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.965417] env[69927]: DEBUG oslo_concurrency.lockutils [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] Acquired lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.965664] env[69927]: DEBUG nova.network.neutron [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1189.996563] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096745, 'name': ReconfigVM_Task, 'duration_secs': 0.731184} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.996975] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.998062] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5403d93d-8ef6-40a7-883b-b955d580cf80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.008446] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1190.008446] env[69927]: value = "task-4096748" [ 1190.008446] env[69927]: _type = "Task" [ 1190.008446] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.024584] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096748, 'name': Rename_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.123457] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.124012] env[69927]: DEBUG nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1190.255292] env[69927]: DEBUG oslo_vmware.api [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096746, 'name': ReconfigVM_Task, 'duration_secs': 0.215885} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.256082] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811577', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'name': 'volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b22fbb0-8628-4c69-b9b4-d6d294c7458b', 'attached_at': '', 'detached_at': '', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'serial': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1190.310018] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096747, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.499842] env[69927]: DEBUG nova.network.neutron [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updated VIF entry in instance network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1190.500311] env[69927]: DEBUG nova.network.neutron [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.520089] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096748, 'name': Rename_Task, 'duration_secs': 0.187773} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.520387] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1190.520705] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b1272a7-ceb9-49cd-a51f-620596768538 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.529637] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1190.529637] env[69927]: value = "task-4096749" [ 1190.529637] env[69927]: _type = "Task" [ 1190.529637] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.545764] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096749, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.630580] env[69927]: DEBUG nova.compute.utils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1190.632068] env[69927]: DEBUG nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1190.632249] env[69927]: DEBUG nova.network.neutron [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1190.737255] env[69927]: DEBUG nova.policy [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76414b2ae1aa4ab582c2b59fd4218005', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544f2a021144492ba1aea46ce6075e53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1190.809878] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096747, 'name': CreateVM_Task, 'duration_secs': 0.744658} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.810322] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1190.811111] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.811304] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.811634] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1190.812451] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-177aa6e6-c143-4f4b-a250-3497a5387408 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.818075] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1190.818075] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fb57a3-bbe0-b2b1-07aa-61a096fc40d0" [ 1190.818075] env[69927]: _type = "Task" [ 1190.818075] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.828480] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fb57a3-bbe0-b2b1-07aa-61a096fc40d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.005060] env[69927]: DEBUG oslo_concurrency.lockutils [req-0105fab3-bfd8-4d6f-8738-031c6d3cdd36 req-31e00753-99c9-42b9-badb-822a44c1f6f8 service nova] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.044399] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096749, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.119175] env[69927]: DEBUG nova.network.neutron [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updated VIF entry in instance network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1191.119175] env[69927]: DEBUG nova.network.neutron [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.135801] env[69927]: DEBUG nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1191.153541] env[69927]: DEBUG nova.compute.manager [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1191.153766] env[69927]: DEBUG nova.compute.manager [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing instance network info cache due to event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1191.153951] env[69927]: DEBUG oslo_concurrency.lockutils [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.154116] env[69927]: DEBUG oslo_concurrency.lockutils [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.154279] env[69927]: DEBUG nova.network.neutron [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.311833] env[69927]: DEBUG nova.objects.instance [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'flavor' on Instance uuid 1b22fbb0-8628-4c69-b9b4-d6d294c7458b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.331439] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52fb57a3-bbe0-b2b1-07aa-61a096fc40d0, 'name': SearchDatastore_Task, 'duration_secs': 0.036311} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.332378] env[69927]: DEBUG nova.network.neutron [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Successfully created port: d355060d-92db-48c9-ac0c-a82f6c92c904 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1191.339312] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.339312] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1191.339312] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.339312] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.339312] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1191.339312] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ab08e6e-d34f-4d0d-bce4-36b0624edd59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.350844] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1191.351152] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1191.352078] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff391ec6-3a43-4017-910d-d70b8c232cc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.358597] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1191.358597] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529eadad-f201-795a-adcf-8128c300e27c" [ 1191.358597] env[69927]: _type = "Task" [ 1191.358597] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.367863] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529eadad-f201-795a-adcf-8128c300e27c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.546309] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096749, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.624146] env[69927]: DEBUG oslo_concurrency.lockutils [req-8e42b195-d32a-40a8-9b67-664d6c48672e req-279bd893-25dd-4da3-adb3-fedf86fd04ac service nova] Releasing lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.643593] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.821075] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a7e33a95-5006-4c3b-b66c-07e78f15dbd1 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.813s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.821696] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.178s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.869991] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 
tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529eadad-f201-795a-adcf-8128c300e27c, 'name': SearchDatastore_Task, 'duration_secs': 0.020823} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.870896] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bda9aa9f-c5c7-40f7-9c53-15f3d4186bec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.878467] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1191.878467] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c69301-3141-0819-40c0-97e1c1ffa6a4" [ 1191.878467] env[69927]: _type = "Task" [ 1191.878467] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.888312] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c69301-3141-0819-40c0-97e1c1ffa6a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.974773] env[69927]: DEBUG nova.network.neutron [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updated VIF entry in instance network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1191.975139] env[69927]: DEBUG nova.network.neutron [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.983437] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36af930b-e316-4daa-bccd-815fbbf7c6bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.990450] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a871d98e-e952-46d5-be97-c5d2681d1991 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Suspending the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1191.990761] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-34c60610-3aac-462f-a632-2454b24f1e6b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.997482] env[69927]: DEBUG oslo_vmware.api [None req-a871d98e-e952-46d5-be97-c5d2681d1991 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1191.997482] env[69927]: value = "task-4096750" [ 1191.997482] env[69927]: _type = "Task" [ 1191.997482] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.006022] env[69927]: DEBUG oslo_vmware.api [None req-a871d98e-e952-46d5-be97-c5d2681d1991 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096750, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.041898] env[69927]: DEBUG oslo_vmware.api [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096749, 'name': PowerOnVM_Task, 'duration_secs': 1.029102} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.042364] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.042364] env[69927]: INFO nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Took 10.76 seconds to spawn the instance on the hypervisor. [ 1192.042497] env[69927]: DEBUG nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1192.043338] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c98380-310d-4421-a7de-4dc92cf2f920 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.130427] env[69927]: DEBUG nova.compute.manager [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1192.149888] env[69927]: DEBUG nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1192.184715] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1192.184974] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1192.185196] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1192.185449] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1192.185675] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1192.185797] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1192.186041] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1192.186192] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1192.186364] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 
tempest-ServersTestJSON-738065696-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1192.186557] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1192.186766] env[69927]: DEBUG nova.virt.hardware [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1192.187966] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6226dbd-3596-423b-9f2d-6457e021be2f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.197220] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671396f5-c21b-4324-b003-5bd3676b4df7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.324757] env[69927]: INFO nova.compute.manager [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Detaching volume ebc32a93-6e2f-4e7b-a036-5117cda5ebfd [ 1192.368115] env[69927]: INFO nova.virt.block_device [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Attempting to driver detach volume ebc32a93-6e2f-4e7b-a036-5117cda5ebfd from mountpoint /dev/sdb [ 1192.368326] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1192.369039] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811577', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'name': 'volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b22fbb0-8628-4c69-b9b4-d6d294c7458b', 'attached_at': '', 'detached_at': '', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'serial': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1192.369529] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f16fb0-8a5e-4fe2-83d8-9326a68609a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.397221] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9fc951-dabd-42c4-906e-405d9e11fce1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.410283] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791b60af-2661-4a6a-816e-549f8400833a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.414244] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c69301-3141-0819-40c0-97e1c1ffa6a4, 'name': SearchDatastore_Task, 'duration_secs': 0.021422} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.414774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.415222] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/b007a697-7da4-4c97-9ccb-046d86b27568.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1192.416257] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e7f9820-dd1a-4514-bdc4-d5e51eb59d4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.439654] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd24d24a-4e19-4cd0-a35e-99d4cb23e454 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.444355] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1192.444355] env[69927]: value = "task-4096751" [ 1192.444355] env[69927]: _type = "Task" [ 1192.444355] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.463046] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] The volume has not been displaced from its original location: [datastore2] volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd/volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1192.469939] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1192.471101] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae43ac53-1d14-4e9b-a17b-d83d7b6530ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.493394] env[69927]: DEBUG oslo_concurrency.lockutils [req-5c6656fb-2d72-4e9d-811f-dee0f8f8fa13 req-c46df80f-baeb-4c58-8906-b80ccbf5ad79 service nova] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.494054] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096751, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.503852] env[69927]: DEBUG oslo_vmware.api [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1192.503852] env[69927]: value = "task-4096752" [ 1192.503852] env[69927]: _type = "Task" [ 1192.503852] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.512074] env[69927]: DEBUG oslo_vmware.api [None req-a871d98e-e952-46d5-be97-c5d2681d1991 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096750, 'name': SuspendVM_Task} progress is 54%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.519423] env[69927]: DEBUG oslo_vmware.api [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096752, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.566336] env[69927]: INFO nova.compute.manager [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Took 20.97 seconds to build instance. 
[ 1192.658281] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.658752] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.957142] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096751, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.012132] env[69927]: DEBUG oslo_vmware.api [None req-a871d98e-e952-46d5-be97-c5d2681d1991 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096750, 'name': SuspendVM_Task, 'duration_secs': 0.778072} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.012517] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a871d98e-e952-46d5-be97-c5d2681d1991 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Suspended the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1193.012646] env[69927]: DEBUG nova.compute.manager [None req-a871d98e-e952-46d5-be97-c5d2681d1991 tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1193.013999] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cefa168-10e5-4103-9960-bb9597f7d8d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.021400] env[69927]: DEBUG oslo_vmware.api [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096752, 'name': ReconfigVM_Task, 'duration_secs': 0.405964} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.021961] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1193.029244] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc8487e7-006c-4deb-a1bd-365064aec8ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.051074] env[69927]: DEBUG oslo_vmware.api [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1193.051074] env[69927]: value = "task-4096753" [ 1193.051074] env[69927]: _type = "Task" [ 1193.051074] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.065837] env[69927]: DEBUG oslo_vmware.api [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096753, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.068527] env[69927]: DEBUG oslo_concurrency.lockutils [None req-99a75858-0c06-426a-bbfa-c9314904ce90 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.499s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.164597] env[69927]: INFO nova.compute.claims [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1193.199783] env[69927]: DEBUG nova.compute.manager [req-0b89db30-d750-4c39-b8dd-4c27afdb4a6e req-544da940-6854-4f92-819f-9d05d5f29e80 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Received event network-vif-plugged-d355060d-92db-48c9-ac0c-a82f6c92c904 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.200136] env[69927]: DEBUG oslo_concurrency.lockutils [req-0b89db30-d750-4c39-b8dd-4c27afdb4a6e req-544da940-6854-4f92-819f-9d05d5f29e80 service nova] Acquiring lock "07814f60-1886-4b06-bcf7-e2c9b95a4501-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.200210] env[69927]: DEBUG oslo_concurrency.lockutils [req-0b89db30-d750-4c39-b8dd-4c27afdb4a6e req-544da940-6854-4f92-819f-9d05d5f29e80 service nova] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.200406] env[69927]: DEBUG oslo_concurrency.lockutils [req-0b89db30-d750-4c39-b8dd-4c27afdb4a6e req-544da940-6854-4f92-819f-9d05d5f29e80 service nova] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.200588] env[69927]: DEBUG nova.compute.manager [req-0b89db30-d750-4c39-b8dd-4c27afdb4a6e req-544da940-6854-4f92-819f-9d05d5f29e80 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] No waiting events found dispatching network-vif-plugged-d355060d-92db-48c9-ac0c-a82f6c92c904 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1193.201457] env[69927]: WARNING nova.compute.manager [req-0b89db30-d750-4c39-b8dd-4c27afdb4a6e req-544da940-6854-4f92-819f-9d05d5f29e80 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Received unexpected event network-vif-plugged-d355060d-92db-48c9-ac0c-a82f6c92c904 for instance with vm_state building and task_state spawning. [ 1193.205720] env[69927]: DEBUG nova.compute.manager [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.206057] env[69927]: DEBUG nova.compute.manager [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing instance network info cache due to event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1193.206511] env[69927]: DEBUG oslo_concurrency.lockutils [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.206627] env[69927]: DEBUG oslo_concurrency.lockutils [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.206888] env[69927]: DEBUG nova.network.neutron [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1193.316135] env[69927]: DEBUG nova.network.neutron [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Successfully updated port: d355060d-92db-48c9-ac0c-a82f6c92c904 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1193.459579] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.909419} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.459854] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/b007a697-7da4-4c97-9ccb-046d86b27568.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1193.460116] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1193.460376] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53e97622-f23e-469f-ae78-0f108d89978b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.468505] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1193.468505] env[69927]: value = "task-4096754" [ 1193.468505] env[69927]: _type = "Task" [ 1193.468505] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.477954] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096754, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.561733] env[69927]: DEBUG oslo_vmware.api [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096753, 'name': ReconfigVM_Task, 'duration_secs': 0.209199} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.562269] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811577', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'name': 'volume-ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b22fbb0-8628-4c69-b9b4-d6d294c7458b', 'attached_at': '', 'detached_at': '', 'volume_id': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd', 'serial': 'ebc32a93-6e2f-4e7b-a036-5117cda5ebfd'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1193.674522] env[69927]: INFO nova.compute.resource_tracker [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating resource usage from migration d6355cd8-1fbd-4455-b64b-c3b173c51f88 [ 1193.819206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "refresh_cache-07814f60-1886-4b06-bcf7-e2c9b95a4501" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.819355] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "refresh_cache-07814f60-1886-4b06-bcf7-e2c9b95a4501" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.819510] env[69927]: DEBUG nova.network.neutron [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1193.901205] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73810108-afbc-4800-9535-c81a1bb63b36 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.910184] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4706b22-de9d-4d49-8ea3-9cd6dc6faba1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.953188] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211032a5-c4d3-4230-bd62-ed2adfb7f91e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.966956] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575eb645-de8f-4a81-a740-2b68e3e64973 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.979952] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096754, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107484} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.988484] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1193.989104] env[69927]: DEBUG nova.compute.provider_tree [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.994116] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dd15e5-255f-4122-b409-ae0d07c64da5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.015396] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/b007a697-7da4-4c97-9ccb-046d86b27568.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1194.019426] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4100cd5d-0213-452b-89b6-0d406dc413ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.040795] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1194.040795] env[69927]: value = "task-4096755" [ 1194.040795] env[69927]: _type = "Task" [ 1194.040795] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.050045] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096755, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.121932] env[69927]: DEBUG nova.objects.instance [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'flavor' on Instance uuid 1b22fbb0-8628-4c69-b9b4-d6d294c7458b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1194.220456] env[69927]: DEBUG nova.network.neutron [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updated VIF entry in instance network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1194.220967] env[69927]: DEBUG nova.network.neutron [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.377153] env[69927]: DEBUG nova.network.neutron [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1194.495898] env[69927]: DEBUG nova.scheduler.client.report [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1194.554756] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096755, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.560850] env[69927]: DEBUG nova.network.neutron [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Updating instance_info_cache with network_info: [{"id": "d355060d-92db-48c9-ac0c-a82f6c92c904", "address": "fa:16:3e:51:8b:29", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd355060d-92", "ovs_interfaceid": "d355060d-92db-48c9-ac0c-a82f6c92c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.724713] env[69927]: DEBUG oslo_concurrency.lockutils [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.725025] env[69927]: DEBUG nova.compute.manager [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1194.725209] env[69927]: DEBUG nova.compute.manager [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 
req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing instance network info cache due to event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1194.725432] env[69927]: DEBUG oslo_concurrency.lockutils [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.725575] env[69927]: DEBUG oslo_concurrency.lockutils [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.725735] env[69927]: DEBUG nova.network.neutron [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1194.806700] env[69927]: INFO nova.compute.manager [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Resuming [ 1194.807582] env[69927]: DEBUG nova.objects.instance [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lazy-loading 'flavor' on Instance uuid cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.001667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.343s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.001887] env[69927]: INFO nova.compute.manager [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Migrating [ 1195.053842] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096755, 'name': ReconfigVM_Task, 'duration_secs': 0.693721} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.054200] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfigured VM instance instance-0000006b to attach disk [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/b007a697-7da4-4c97-9ccb-046d86b27568.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1195.054760] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-541654e4-826e-40ee-bdb8-2e98de20b64c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.062616] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1195.062616] env[69927]: value = "task-4096756" [ 1195.062616] env[69927]: _type = "Task" [ 1195.062616] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.066702] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "refresh_cache-07814f60-1886-4b06-bcf7-e2c9b95a4501" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.067040] env[69927]: DEBUG nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Instance network_info: |[{"id": "d355060d-92db-48c9-ac0c-a82f6c92c904", "address": "fa:16:3e:51:8b:29", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd355060d-92", "ovs_interfaceid": "d355060d-92db-48c9-ac0c-a82f6c92c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1195.067511] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:51:8b:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd355060d-92db-48c9-ac0c-a82f6c92c904', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1195.078469] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1195.079514] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1195.079758] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a3ba6cb-4fcf-47d0-8cbb-d7f447556b16 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.098334] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096756, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.104277] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1195.104277] env[69927]: value = "task-4096757" [ 1195.104277] env[69927]: _type = "Task" [ 1195.104277] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.113623] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096757, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.131520] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6c55921f-eca3-449e-b486-9796f7b78320 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.310s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.232640] env[69927]: DEBUG nova.compute.manager [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Received event network-changed-d355060d-92db-48c9-ac0c-a82f6c92c904 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.232640] env[69927]: DEBUG nova.compute.manager [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Refreshing instance network info cache due to event network-changed-d355060d-92db-48c9-ac0c-a82f6c92c904. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1195.232917] env[69927]: DEBUG oslo_concurrency.lockutils [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] Acquiring lock "refresh_cache-07814f60-1886-4b06-bcf7-e2c9b95a4501" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.233839] env[69927]: DEBUG oslo_concurrency.lockutils [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] Acquired lock "refresh_cache-07814f60-1886-4b06-bcf7-e2c9b95a4501" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.233839] env[69927]: DEBUG nova.network.neutron [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Refreshing network info cache for port d355060d-92db-48c9-ac0c-a82f6c92c904 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.253287] env[69927]: DEBUG nova.compute.manager [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Received event network-changed-3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.253287] env[69927]: DEBUG nova.compute.manager [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Refreshing instance network info cache due to event network-changed-3e85edce-fa8a-45d4-b109-5bdd98a06303. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1195.253455] env[69927]: DEBUG oslo_concurrency.lockutils [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] Acquiring lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.253602] env[69927]: DEBUG oslo_concurrency.lockutils [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] Acquired lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.253764] env[69927]: DEBUG nova.network.neutron [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Refreshing network info cache for port 3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.521170] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.521836] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.522452] env[69927]: DEBUG nova.network.neutron [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1195.580565] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096756, 'name': Rename_Task, 'duration_secs': 0.290353} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.580991] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.581351] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aeb9cdf5-3f13-419e-b6d2-539216baf51b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.589799] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1195.589799] env[69927]: value = "task-4096758" [ 1195.589799] env[69927]: _type = "Task" [ 1195.589799] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.600252] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.614679] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096757, 'name': CreateVM_Task, 'duration_secs': 0.490145} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.614925] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1195.615687] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.615907] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.616285] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1195.616594] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67047765-ced4-48a2-aa90-2c97c6fffe88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.622092] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1195.622092] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527ce5c3-d11c-18b6-d0d7-7276c636cdab" [ 1195.622092] env[69927]: _type = "Task" [ 1195.622092] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.638308] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527ce5c3-d11c-18b6-d0d7-7276c636cdab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.970688] env[69927]: DEBUG nova.network.neutron [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updated VIF entry in instance network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1195.971083] env[69927]: DEBUG nova.network.neutron [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.096861] env[69927]: DEBUG nova.network.neutron [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Updated VIF entry in instance network info cache for port d355060d-92db-48c9-ac0c-a82f6c92c904. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.097574] env[69927]: DEBUG nova.network.neutron [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Updating instance_info_cache with network_info: [{"id": "d355060d-92db-48c9-ac0c-a82f6c92c904", "address": "fa:16:3e:51:8b:29", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd355060d-92", "ovs_interfaceid": "d355060d-92db-48c9-ac0c-a82f6c92c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.105619] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096758, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.133092] env[69927]: DEBUG nova.network.neutron [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updated VIF entry in instance network info cache for port 3e85edce-fa8a-45d4-b109-5bdd98a06303. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.133092] env[69927]: DEBUG nova.network.neutron [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [{"id": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "address": "fa:16:3e:df:fd:25", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e85edce-fa", "ovs_interfaceid": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.138130] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527ce5c3-d11c-18b6-d0d7-7276c636cdab, 'name': SearchDatastore_Task, 'duration_secs': 0.030385} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.138130] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.138130] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1196.138302] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.138852] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.138852] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1196.141029] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b5145cd-65e4-4b99-88db-ded4ddf92bb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.152398] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1196.152642] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1196.153383] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a49ce85-8f92-41f6-a272-c3b3270f847c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.161836] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1196.161836] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b5dc21-bd1a-f737-8ebb-45f0d8a2adbe" [ 1196.161836] env[69927]: _type = "Task" [ 1196.161836] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.177126] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b5dc21-bd1a-f737-8ebb-45f0d8a2adbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.320938] env[69927]: DEBUG oslo_concurrency.lockutils [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.321057] env[69927]: DEBUG oslo_concurrency.lockutils [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquired lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.321247] env[69927]: DEBUG nova.network.neutron [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.408515] env[69927]: DEBUG nova.network.neutron [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance_info_cache with network_info: [{"id": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "address": "fa:16:3e:60:97:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ee960c-41", "ovs_interfaceid": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.476845] env[69927]: DEBUG oslo_concurrency.lockutils [req-3d6b96f0-19a1-4f26-82c7-ba3e95c1c662 req-694540fb-e6da-42c8-9bf1-b5e85911e7d8 service nova] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.601687] env[69927]: DEBUG oslo_concurrency.lockutils [req-683732d9-d713-48a7-84fc-e76bfd590f93 req-3227a30e-4aeb-4ab1-bd8d-027f1137d548 service nova] Releasing lock "refresh_cache-07814f60-1886-4b06-bcf7-e2c9b95a4501" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.602143] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096758, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.616055] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.616055] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.616055] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.616055] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.616055] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 
tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.618171] env[69927]: INFO nova.compute.manager [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Terminating instance [ 1196.639483] env[69927]: DEBUG oslo_concurrency.lockutils [req-c49c76c3-a0ab-4f4c-8db6-da86eace1efc req-be98aad9-a5e5-4598-8e73-3947fb4ab720 service nova] Releasing lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.675123] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b5dc21-bd1a-f737-8ebb-45f0d8a2adbe, 'name': SearchDatastore_Task, 'duration_secs': 0.009845} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.676032] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cff537b7-1aeb-4938-9915-14066173271c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.682690] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1196.682690] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52164038-aaa7-1b3f-035e-9d19b6f9f61c" [ 1196.682690] env[69927]: _type = "Task" [ 1196.682690] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.692990] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52164038-aaa7-1b3f-035e-9d19b6f9f61c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.731610] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "2a0a6870-47ad-4958-afed-bdbda3e54c21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.731857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.911691] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.102373] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096758, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.125020] env[69927]: DEBUG nova.compute.manager [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1197.125020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1197.125020] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02627661-4bff-4c94-b0de-5e6dfb7fa398 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.140457] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1197.146419] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bbfcc79-4cb3-4fe3-b3eb-11d230c02e1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.146419] env[69927]: DEBUG nova.network.neutron [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [{"id": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "address": "fa:16:3e:23:0c:58", "network": {"id": "16f178eb-5c9f-4d2d-bde1-6816bb4e832b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1614734358-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef85ff9fc3d240a8a24b6cea8dda0f6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2105ba-02", "ovs_interfaceid": "eb2105ba-0276-4bc6-a2af-933090d4cdcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.156292] env[69927]: DEBUG oslo_vmware.api [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1197.156292] env[69927]: value = "task-4096759" [ 1197.156292] env[69927]: _type = "Task" [ 1197.156292] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.174250] env[69927]: DEBUG oslo_vmware.api [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096759, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.194564] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52164038-aaa7-1b3f-035e-9d19b6f9f61c, 'name': SearchDatastore_Task, 'duration_secs': 0.011553} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.194870] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.195150] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 07814f60-1886-4b06-bcf7-e2c9b95a4501/07814f60-1886-4b06-bcf7-e2c9b95a4501.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1197.195463] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2fecda0-6e29-4dde-985d-d1d2b1b2bf46 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.203809] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1197.203809] env[69927]: value = "task-4096760" [ 1197.203809] env[69927]: _type = "Task" [ 1197.203809] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.214062] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096760, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.235948] env[69927]: DEBUG nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1197.605314] env[69927]: DEBUG oslo_vmware.api [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096758, 'name': PowerOnVM_Task, 'duration_secs': 1.864147} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.605314] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.605314] env[69927]: INFO nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Took 10.73 seconds to spawn the instance on the hypervisor. [ 1197.605314] env[69927]: DEBUG nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1197.606086] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7f4a08-0ee4-4935-a9f8-06c9d0b0f3a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.649421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Releasing lock "refresh_cache-cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.652309] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec125ac7-569d-4c82-9b96-e8de70af9431 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.663052] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Resuming the VM {{(pid=69927) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1197.664084] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91c84817-429e-48d1-85cb-e9fe59bf0635 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.672108] env[69927]: DEBUG oslo_vmware.api [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096759, 'name': PowerOffVM_Task, 'duration_secs': 0.233439} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.673664] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1197.673869] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1197.674244] env[69927]: DEBUG oslo_vmware.api [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1197.674244] env[69927]: value = "task-4096761" [ 1197.674244] env[69927]: _type = "Task" [ 1197.674244] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.674490] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00ff20cd-1733-4773-80ac-7ec51cb3f97f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.685367] env[69927]: DEBUG oslo_vmware.api [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096761, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.716245] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096760, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.767307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.767620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.769458] env[69927]: INFO nova.compute.claims [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1197.863214] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-d548ea75-9c1f-4884-b338-194f1b5d62ef-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.863527] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-d548ea75-9c1f-4884-b338-194f1b5d62ef-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.863910] env[69927]: DEBUG nova.objects.instance [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'flavor' on Instance uuid d548ea75-9c1f-4884-b338-194f1b5d62ef {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.918653] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1197.918878] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1197.919076] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 
tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleting the datastore file [datastore1] 1b22fbb0-8628-4c69-b9b4-d6d294c7458b {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1197.919356] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1205e529-8656-40f7-8bcc-291cf8df52a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.928661] env[69927]: DEBUG oslo_vmware.api [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1197.928661] env[69927]: value = "task-4096763" [ 1197.928661] env[69927]: _type = "Task" [ 1197.928661] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.938379] env[69927]: DEBUG oslo_vmware.api [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096763, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.128157] env[69927]: INFO nova.compute.manager [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Took 23.53 seconds to build instance. [ 1198.188670] env[69927]: DEBUG oslo_vmware.api [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096761, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.218308] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096760, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594765} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.218770] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 07814f60-1886-4b06-bcf7-e2c9b95a4501/07814f60-1886-4b06-bcf7-e2c9b95a4501.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1198.219050] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1198.219334] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-befb05e7-c6c8-4e1d-a754-61e71263984a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.230631] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1198.230631] env[69927]: value = "task-4096764" [ 1198.230631] env[69927]: _type = "Task" [ 1198.230631] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.244470] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.429365] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a56920f-bdd3-4cd1-ae90-2d42322ce2c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.443333] env[69927]: DEBUG oslo_vmware.api [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096763, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160999} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.458476] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.458744] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1198.458977] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1198.459211] env[69927]: INFO nova.compute.manager [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1198.459484] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1198.459791] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance '56aec5c2-d344-4a8d-a55a-930bc425150a' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1198.463439] env[69927]: DEBUG nova.compute.manager [-] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1198.463585] env[69927]: DEBUG nova.network.neutron [-] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1198.473278] env[69927]: DEBUG nova.objects.instance [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'pci_requests' on Instance uuid d548ea75-9c1f-4884-b338-194f1b5d62ef {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.630766] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f9a5ab49-7944-49fb-8e17-95e7427b7ab8 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.040s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.689886] env[69927]: DEBUG oslo_vmware.api [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096761, 'name': PowerOnVM_Task, 'duration_secs': 0.609778} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.690778] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Resumed the VM {{(pid=69927) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1198.691406] env[69927]: DEBUG nova.compute.manager [None req-13059f83-4607-4dd0-9f57-375b6817a68a tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1198.692581] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aab14c7-557c-4bd0-9ed2-ed41b5f44681 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.744463] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129695} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.745035] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1198.746797] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af04855f-e2b0-4b00-bf82-2ae3c037dae7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.781030] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 07814f60-1886-4b06-bcf7-e2c9b95a4501/07814f60-1886-4b06-bcf7-e2c9b95a4501.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1198.784173] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1eb4ded5-df9b-4d2c-b307-a27e5fd57b80 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.826957] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1198.826957] env[69927]: value = "task-4096765" [ 1198.826957] env[69927]: _type = "Task" [ 1198.826957] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.842099] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096765, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.968542] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1198.968928] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c9b51dd-0ee5-4929-a914-9daad63daf59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.979801] env[69927]: DEBUG nova.objects.base [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1198.980216] env[69927]: DEBUG nova.network.neutron [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1198.985934] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1198.985934] env[69927]: value = "task-4096766" [ 1198.985934] env[69927]: _type = "Task" [ 1198.985934] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.001196] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096766, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.102930] env[69927]: DEBUG nova.policy [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1199.107313] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a65b52-54b7-4e08-9f69-88ae062b8478 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.115687] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05794fda-c860-408e-b719-c8bfe436cecb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.162469] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3177605c-a78b-4780-8696-5511e799731f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.173309] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c4d942-6940-4200-b582-ea350a25f301 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.189281] env[69927]: DEBUG nova.compute.provider_tree [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1199.341012] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096765, 'name': ReconfigVM_Task, 'duration_secs': 0.387717} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.341403] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 07814f60-1886-4b06-bcf7-e2c9b95a4501/07814f60-1886-4b06-bcf7-e2c9b95a4501.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.342061] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d0ef31d-c293-4e46-8c6a-990857ee300b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.350599] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1199.350599] env[69927]: value = "task-4096767" [ 1199.350599] env[69927]: _type = "Task" [ 1199.350599] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.363609] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096767, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.370117] env[69927]: DEBUG nova.compute.manager [req-f2658da8-9123-4298-81ed-00dd79329d5c req-ac86985f-bf2f-45f0-b5fb-a151349b6f29 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Received event network-vif-deleted-9251f381-f500-4fb3-a407-0020e12af1ec {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1199.370338] env[69927]: INFO nova.compute.manager [req-f2658da8-9123-4298-81ed-00dd79329d5c req-ac86985f-bf2f-45f0-b5fb-a151349b6f29 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Neutron deleted interface 9251f381-f500-4fb3-a407-0020e12af1ec; detaching it from the instance and deleting it from the info cache [ 1199.370513] env[69927]: DEBUG nova.network.neutron [req-f2658da8-9123-4298-81ed-00dd79329d5c req-ac86985f-bf2f-45f0-b5fb-a151349b6f29 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.484179] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.484387] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.484539] env[69927]: INFO nova.compute.manager [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Shelving [ 1199.487301] env[69927]: DEBUG nova.network.neutron [-] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.499829] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.715136] env[69927]: ERROR nova.scheduler.client.report [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [req-92334f53-0621-4e2e-97a5-6ffd31de17a1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-92334f53-0621-4e2e-97a5-6ffd31de17a1"}]} [ 1199.732864] env[69927]: DEBUG nova.scheduler.client.report [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1199.750695] env[69927]: DEBUG nova.scheduler.client.report [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1199.751031] env[69927]: DEBUG nova.compute.provider_tree [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1199.763821] env[69927]: DEBUG nova.scheduler.client.report [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1199.787655] env[69927]: DEBUG nova.scheduler.client.report [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1199.862385] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096767, 'name': Rename_Task, 'duration_secs': 0.172841} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.865165] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1199.865602] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e85bf28c-ec45-4eea-a021-2b1d44ef418d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.873613] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1199.873613] env[69927]: value = "task-4096768" [ 1199.873613] env[69927]: _type = "Task" [ 1199.873613] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.873850] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f9c3e37-7c3c-405c-bfad-e7e2659a38a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.888444] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096768, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.892297] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e76b8c0-1231-415e-a3e0-0d07d5d59462 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.932630] env[69927]: DEBUG nova.compute.manager [req-f2658da8-9123-4298-81ed-00dd79329d5c req-ac86985f-bf2f-45f0-b5fb-a151349b6f29 service nova] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Detach interface failed, port_id=9251f381-f500-4fb3-a407-0020e12af1ec, reason: Instance 1b22fbb0-8628-4c69-b9b4-d6d294c7458b could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1199.998646] env[69927]: INFO nova.compute.manager [-] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Took 1.53 seconds to deallocate network for instance. [ 1200.014276] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096766, 'name': PowerOffVM_Task, 'duration_secs': 0.985348} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.014501] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1200.014694] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance '56aec5c2-d344-4a8d-a55a-930bc425150a' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.056439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4642bb93-1e40-4b55-98ab-5d91885f77d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.067034] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edf0caf-d2b7-48f4-bc19-84c0d9acdfab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.101200] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638bd20a-a436-4706-966a-e07a501484c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.110688] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860defc5-6385-435a-bf57-48bc536678cc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.126526] env[69927]: DEBUG nova.compute.provider_tree [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 
with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1200.391534] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096768, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.499802] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.500156] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e507342c-336a-4661-9012-3b575b2cc4fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.508833] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.509886] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1200.509886] env[69927]: value = "task-4096769" [ 1200.509886] env[69927]: _type = "Task" [ 1200.509886] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.523189] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1200.523628] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1200.523888] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1200.524210] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1200.524436] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1200.524623] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1200.524878] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1200.525112] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1200.525297] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible 
topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1200.525544] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1200.525755] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1200.533990] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096769, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.533990] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88883bc9-8daa-43b7-a611-a9dad9a87068 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.551262] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1200.551262] env[69927]: value = "task-4096770" [ 1200.551262] env[69927]: _type = "Task" [ 1200.551262] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.561136] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096770, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.676810] env[69927]: DEBUG nova.scheduler.client.report [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 153 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1200.677114] env[69927]: DEBUG nova.compute.provider_tree [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 153 to 154 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1200.677307] env[69927]: DEBUG nova.compute.provider_tree [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1200.886763] env[69927]: DEBUG oslo_vmware.api [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096768, 'name': PowerOnVM_Task, 'duration_secs': 0.675693} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.887055] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1200.887264] env[69927]: INFO nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Took 8.74 seconds to spawn the instance on the hypervisor. 
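Editor's note: the inventory update traced above first failed with 409 "placement.concurrent_update" (the cached resource provider generation was stale), and only succeeded after the report client refreshed the provider and retried, bumping the generation from 153 to 154. Below is a minimal, hypothetical sketch of that generation-aware retry pattern against the Placement REST endpoints, using a plain requests session; the retry count and helper name are assumptions for illustration, not Nova's actual report-client code.

import requests

def set_inventory(sess: requests.Session, base_url: str, rp_uuid: str,
                  inventories: dict, max_retries: int = 4) -> dict:
    """PUT the desired inventory, refreshing the provider generation on 409."""
    for _ in range(max_retries):
        # Fetch the provider's current generation; a stale value is what
        # produced the placement.concurrent_update conflict in the log above.
        rp = sess.get(f"{base_url}/resource_providers/{rp_uuid}").json()
        payload = {
            "resource_provider_generation": rp["generation"],
            "inventories": inventories,
        }
        resp = sess.put(f"{base_url}/resource_providers/{rp_uuid}/inventories",
                        json=payload)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()  # success: generation bumped server-side (153 -> 154)
        # 409: another writer changed the provider in the meantime;
        # loop, re-read the generation, and retry the PUT.
    raise RuntimeError("inventory update kept conflicting; giving up")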
[ 1200.887448] env[69927]: DEBUG nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1200.890132] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98eb6ebb-dd4b-465d-985f-41aef57408ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.898871] env[69927]: DEBUG nova.network.neutron [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Successfully updated port: 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1201.020837] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096769, 'name': PowerOffVM_Task, 'duration_secs': 0.334481} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.021140] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.022029] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4591df-07aa-43a2-a9a7-a5fdb05946a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.040924] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50407ee-a4a3-41d8-8459-8d844f00fa0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.056895] env[69927]: DEBUG nova.compute.manager [req-4b900ffa-ae22-442f-9310-8844f3f2953c req-abf4f283-1184-427a-abd4-3889d82fd32b service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-vif-plugged-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1201.057775] env[69927]: DEBUG oslo_concurrency.lockutils [req-4b900ffa-ae22-442f-9310-8844f3f2953c req-abf4f283-1184-427a-abd4-3889d82fd32b service nova] Acquiring lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.057775] env[69927]: DEBUG oslo_concurrency.lockutils [req-4b900ffa-ae22-442f-9310-8844f3f2953c req-abf4f283-1184-427a-abd4-3889d82fd32b service nova] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.057775] env[69927]: DEBUG oslo_concurrency.lockutils 
[req-4b900ffa-ae22-442f-9310-8844f3f2953c req-abf4f283-1184-427a-abd4-3889d82fd32b service nova] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.057775] env[69927]: DEBUG nova.compute.manager [req-4b900ffa-ae22-442f-9310-8844f3f2953c req-abf4f283-1184-427a-abd4-3889d82fd32b service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] No waiting events found dispatching network-vif-plugged-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1201.058077] env[69927]: WARNING nova.compute.manager [req-4b900ffa-ae22-442f-9310-8844f3f2953c req-abf4f283-1184-427a-abd4-3889d82fd32b service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received unexpected event network-vif-plugged-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 for instance with vm_state active and task_state None. [ 1201.063829] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096770, 'name': ReconfigVM_Task, 'duration_secs': 0.409868} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.064135] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance '56aec5c2-d344-4a8d-a55a-930bc425150a' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.182936] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.415s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.183503] env[69927]: DEBUG nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1201.187797] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.679s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.188093] env[69927]: DEBUG nova.objects.instance [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'resources' on Instance uuid 1b22fbb0-8628-4c69-b9b4-d6d294c7458b {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.251284] env[69927]: DEBUG nova.objects.instance [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lazy-loading 'flavor' on Instance uuid 7ff17f1d-31fd-440b-906c-2719770a9151 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.406230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.406857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.406857] env[69927]: DEBUG nova.network.neutron [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1201.411016] env[69927]: INFO nova.compute.manager [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Took 17.49 seconds to build instance. 
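Editor's note: the CopyVirtualDisk / ExtendVirtualDisk / ReconfigVM / Rename / PowerOnVM chain that ends with "Took 17.49 seconds to build instance" above is driven by the same wait-for-task loop each time: submit a vCenter task, poll it (the repeated "progress is N%" lines), and finish when it reports "completed successfully" with a duration. A minimal sketch of that polling shape follows; fetch_task is a hypothetical callable standing in for whatever returns the task's state, not oslo.vmware's actual implementation.

import time
from typing import Callable

def wait_for_task(fetch_task: Callable[[], dict],
                  interval: float = 0.5, timeout: float = 300.0) -> dict:
    """Poll a task until it succeeds, raising on error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task()       # e.g. {'state': 'running', 'progress': 51}
        if info["state"] == "success":
            return info           # the log's "completed successfully" case
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)      # a "progress is N%" line would be logged here
    raise TimeoutError("task did not complete within %.0fs" % timeout)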
[ 1201.507481] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.507660] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.552532] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1201.552895] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bf95ca74-3a89-4300-8ee9-e2e6d86396e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.561974] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1201.561974] env[69927]: value = "task-4096771" [ 1201.561974] env[69927]: _type = "Task" [ 1201.561974] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.571927] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.572303] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.572457] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.572710] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.572881] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.573082] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.573337] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.573586] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.573823] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.574056] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.574285] env[69927]: DEBUG nova.virt.hardware [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.580494] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1201.584806] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21718813-7fe7-432c-b496-2756b5f264d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.601020] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096771, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.603761] env[69927]: DEBUG nova.compute.manager [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Received event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1201.604088] env[69927]: DEBUG nova.compute.manager [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing instance network info cache due to event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1201.604361] env[69927]: DEBUG oslo_concurrency.lockutils [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] Acquiring lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.604425] env[69927]: DEBUG oslo_concurrency.lockutils [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] Acquired lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.604642] env[69927]: DEBUG nova.network.neutron [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1201.614055] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1201.614055] env[69927]: value = "task-4096772" [ 1201.614055] env[69927]: _type = "Task" [ 1201.614055] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.625802] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096772, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.689301] env[69927]: DEBUG nova.compute.utils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1201.691522] env[69927]: DEBUG nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1201.691859] env[69927]: DEBUG nova.network.neutron [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1201.758098] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.758305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquired lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.771322] env[69927]: DEBUG nova.policy [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7aa1c141ecd44cbca0214986bb28d94b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c73f101f61ff4315a9e407b784df06f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1201.912443] env[69927]: DEBUG oslo_concurrency.lockutils [None req-422db256-e919-47c8-95e8-00180d0fb091 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.005s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.950267] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9205b76f-ea5c-42a5-ab2c-cf59bc68cfe8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.961013] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b704cc-298f-457a-9d18-9659c72dcdc0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.016128] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b9bffd-2c1d-4f4b-9571-a6df3a7c0b21 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.026582] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b795067-94d6-426d-9bde-109f7b9be8ce {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.043581] env[69927]: DEBUG nova.compute.provider_tree [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.072713] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096771, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.128423] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096772, 'name': ReconfigVM_Task, 'duration_secs': 0.227024} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.132030] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1202.132030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f1fa19-d39c-4df4-a59b-8946e1a375c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.155324] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 56aec5c2-d344-4a8d-a55a-930bc425150a/56aec5c2-d344-4a8d-a55a-930bc425150a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1202.155732] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b51affad-4ffb-4439-bede-b6a27e5810f2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.177386] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1202.177386] env[69927]: value = "task-4096773" [ 1202.177386] env[69927]: _type = "Task" [ 1202.177386] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.187490] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096773, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.192203] env[69927]: WARNING nova.network.neutron [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] b8b342c3-e0d7-4186-9541-03e865142f8a already exists in list: networks containing: ['b8b342c3-e0d7-4186-9541-03e865142f8a']. ignoring it [ 1202.195815] env[69927]: DEBUG nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1202.242918] env[69927]: DEBUG nova.network.neutron [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Successfully created port: 43c0ab61-f8c2-4ed2-8aa0-effc43628918 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1202.548041] env[69927]: DEBUG nova.scheduler.client.report [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.580021] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096771, 'name': CreateSnapshot_Task, 'duration_secs': 0.594281} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.582195] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1202.583562] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b641fc-a44d-42e6-9afa-d79069bf9bf3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.691889] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096773, 'name': ReconfigVM_Task, 'duration_secs': 0.324571} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.691889] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 56aec5c2-d344-4a8d-a55a-930bc425150a/56aec5c2-d344-4a8d-a55a-930bc425150a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1202.692109] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance '56aec5c2-d344-4a8d-a55a-930bc425150a' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1202.764539] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.764780] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.850930] env[69927]: DEBUG nova.network.neutron [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updated VIF entry in instance network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1202.851301] env[69927]: DEBUG nova.network.neutron [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.919444] env[69927]: DEBUG nova.network.neutron [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1202.955957] env[69927]: DEBUG nova.network.neutron [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "address": "fa:16:3e:32:88:2b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70d5cdb3-06", "ovs_interfaceid": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.010523] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.010761] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.058318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.082706] env[69927]: INFO nova.scheduler.client.report [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted allocations for instance 1b22fbb0-8628-4c69-b9b4-d6d294c7458b [ 1203.085909] env[69927]: DEBUG nova.compute.manager [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-changed-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1203.086202] env[69927]: DEBUG nova.compute.manager [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing instance network info cache due to event network-changed-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1203.086344] env[69927]: DEBUG oslo_concurrency.lockutils [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.106018] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1203.106928] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-98898b44-49b0-4106-8798-d7442309ccf4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.117559] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1203.117559] env[69927]: value = "task-4096774" [ 1203.117559] env[69927]: _type = "Task" [ 1203.117559] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.127944] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096774, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.200192] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a555e5ca-e317-4222-af22-ca97db16e478 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.206529] env[69927]: DEBUG nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1203.227810] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79169aa-bdc1-45cb-aa6c-3a3df66f095e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.252113] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance '56aec5c2-d344-4a8d-a55a-930bc425150a' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.267160] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1203.267444] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.267598] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1203.267778] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.267926] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1203.268080] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1203.268295] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1203.268453] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1203.268622] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1203.268822] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1203.269031] env[69927]: DEBUG nova.virt.hardware [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1203.269487] env[69927]: DEBUG nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1203.272957] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30022c48-44c9-493c-a0f2-437a57444001 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.284205] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fa1707-5180-4e37-a5b7-891aa98e256e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.355156] env[69927]: DEBUG oslo_concurrency.lockutils [req-c50c6744-8753-40a6-beed-cd735bc9bf6b req-93fc64ec-3f6e-4f63-b1a6-108ae5f9e8fb service nova] Releasing lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.458483] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.459243] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.459432] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.459684] env[69927]: DEBUG oslo_concurrency.lockutils [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.459887] env[69927]: DEBUG nova.network.neutron [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing network info cache for port 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.461798] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcacc97-805e-499a-9cb2-9491e56b64f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.480727] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1203.481058] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.481300] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1203.481512] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.481662] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1203.481810] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1203.482046] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1203.482312] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1203.482439] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1203.482609] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1203.482793] env[69927]: DEBUG nova.virt.hardware [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1203.489557] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Reconfiguring VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1203.490704] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56646b55-cead-4a89-8e5d-c222bd2e9e0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.507530] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.507748] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.510761] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.510896] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1203.513808] env[69927]: DEBUG oslo_vmware.api [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1203.513808] env[69927]: value = "task-4096775" [ 1203.513808] env[69927]: _type = "Task" [ 1203.513808] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.524211] env[69927]: DEBUG oslo_vmware.api [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096775, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.592877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cafce470-7c09-4bdb-a6b1-4635d39a36f9 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "1b22fbb0-8628-4c69-b9b4-d6d294c7458b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.978s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.627818] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096774, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.705805] env[69927]: DEBUG nova.network.neutron [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.717440] env[69927]: DEBUG nova.compute.manager [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Received event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1203.717440] env[69927]: DEBUG nova.compute.manager [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing instance network info cache due to event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1203.717440] env[69927]: DEBUG oslo_concurrency.lockutils [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] Acquiring lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.798272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.798496] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.800324] env[69927]: INFO nova.compute.claims [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1203.806923] env[69927]: DEBUG nova.network.neutron [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Port 75ee960c-41d4-4858-8b1e-8198b77eb0d7 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1204.026967] env[69927]: DEBUG oslo_vmware.api [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096775, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.040347] env[69927]: DEBUG nova.network.neutron [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Successfully updated port: 43c0ab61-f8c2-4ed2-8aa0-effc43628918 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1204.131683] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096774, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.175904] env[69927]: DEBUG nova.objects.instance [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lazy-loading 'flavor' on Instance uuid 7ff17f1d-31fd-440b-906c-2719770a9151 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.202744] env[69927]: DEBUG nova.network.neutron [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updated VIF entry in instance network info cache for port 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.203140] env[69927]: DEBUG nova.network.neutron [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "address": "fa:16:3e:32:88:2b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70d5cdb3-06", "ovs_interfaceid": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.208709] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Releasing lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.209487] env[69927]: DEBUG nova.compute.manager [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Inject network info {{(pid=69927) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1204.209487] env[69927]: DEBUG nova.compute.manager [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] network_info to inject: |[{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1204.214190] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Reconfiguring VM instance to set the machine id {{(pid=69927) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1204.215106] env[69927]: DEBUG oslo_concurrency.lockutils [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] Acquired lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.215300] env[69927]: DEBUG nova.network.neutron [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing network info cache for port 
c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.216636] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-938b9dd8-c1b5-4647-bb2f-58918e213564 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.234637] env[69927]: DEBUG oslo_vmware.api [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1204.234637] env[69927]: value = "task-4096776" [ 1204.234637] env[69927]: _type = "Task" [ 1204.234637] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.244738] env[69927]: DEBUG oslo_vmware.api [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096776, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.526313] env[69927]: DEBUG oslo_vmware.api [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096775, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.543543] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "refresh_cache-2a0a6870-47ad-4958-afed-bdbda3e54c21" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.543543] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquired lock "refresh_cache-2a0a6870-47ad-4958-afed-bdbda3e54c21" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.543543] env[69927]: DEBUG nova.network.neutron [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1204.629544] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096774, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.681814] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.705582] env[69927]: DEBUG oslo_concurrency.lockutils [req-98f34c4f-2282-43b5-952c-f91ba2a90416 req-334a4dcd-7853-4703-a78b-c23fa3c66d76 service nova] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.745087] env[69927]: DEBUG oslo_vmware.api [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096776, 'name': ReconfigVM_Task, 'duration_secs': 0.223282} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.745386] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c95dc73-8fae-4720-b854-e72b83573f06 tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Reconfigured VM instance to set the machine id {{(pid=69927) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1204.841158] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.841308] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.841754] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.998356] env[69927]: DEBUG nova.network.neutron [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updated VIF entry in instance network info cache for port c1c89675-9c86-4cf9-9c34-fdea74b6cf04. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.998723] env[69927]: DEBUG nova.network.neutron [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.027642] env[69927]: DEBUG oslo_vmware.api [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096775, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.086236] env[69927]: DEBUG nova.network.neutron [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1205.109716] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e377ef26-fc59-4f0b-87a2-48a8f3e2c9d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.118149] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4d4fb4-869c-4c44-a708-e80a894438d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.132367] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096774, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.163927] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9314cbc6-d4b1-4ffd-9e83-f934364e793e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.168737] env[69927]: DEBUG nova.compute.manager [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Received event network-vif-plugged-43c0ab61-f8c2-4ed2-8aa0-effc43628918 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1205.168990] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] Acquiring lock "2a0a6870-47ad-4958-afed-bdbda3e54c21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.169218] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.169384] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.169546] env[69927]: DEBUG nova.compute.manager [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] No waiting events found dispatching network-vif-plugged-43c0ab61-f8c2-4ed2-8aa0-effc43628918 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1205.169708] env[69927]: WARNING nova.compute.manager [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Received unexpected event network-vif-plugged-43c0ab61-f8c2-4ed2-8aa0-effc43628918 for instance with vm_state building and task_state spawning. [ 1205.169885] env[69927]: DEBUG nova.compute.manager [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Received event network-changed-43c0ab61-f8c2-4ed2-8aa0-effc43628918 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1205.170047] env[69927]: DEBUG nova.compute.manager [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Refreshing instance network info cache due to event network-changed-43c0ab61-f8c2-4ed2-8aa0-effc43628918. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1205.170222] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] Acquiring lock "refresh_cache-2a0a6870-47ad-4958-afed-bdbda3e54c21" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.178754] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c69a8cf-3fc6-4bf3-aaf2-63bcc4c4de25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.194849] env[69927]: DEBUG nova.compute.provider_tree [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.272431] env[69927]: DEBUG nova.network.neutron [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Updating instance_info_cache with network_info: [{"id": "43c0ab61-f8c2-4ed2-8aa0-effc43628918", "address": "fa:16:3e:ab:b3:52", "network": {"id": "f547c151-2ec8-4fc7-a16c-c8abb6cfd8b9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-921215762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73f101f61ff4315a9e407b784df06f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c0ab61-f8", "ovs_interfaceid": "43c0ab61-f8c2-4ed2-8aa0-effc43628918", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.502057] env[69927]: DEBUG oslo_concurrency.lockutils [req-f4ce82b2-01f2-4cb1-bc5b-6c68b328f928 req-e86c1c1a-1a40-4676-beca-d5be9049f39f service nova] Releasing lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.502553] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquired lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.532543] env[69927]: DEBUG oslo_vmware.api [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 
tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096775, 'name': ReconfigVM_Task, 'duration_secs': 1.809228} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.533352] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.533843] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Reconfigured VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1205.632257] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096774, 'name': CloneVM_Task, 'duration_secs': 2.026559} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.632622] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Created linked-clone VM from snapshot [ 1205.633264] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1d5ac9-459a-4ed3-9489-aaa45f1a20b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.641271] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Uploading image 87cd0321-c9d5-427e-8af6-c3bd78649765 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1205.667241] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1205.667241] env[69927]: value = "vm-811582" [ 1205.667241] env[69927]: _type = "VirtualMachine" [ 1205.667241] env[69927]: }. 
{{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1205.667548] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-154a23df-b0a8-4b41-b740-3b67d10165aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.675947] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease: (returnval){ [ 1205.675947] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528caaed-005f-5a59-6077-f4ef4ccf10d4" [ 1205.675947] env[69927]: _type = "HttpNfcLease" [ 1205.675947] env[69927]: } obtained for exporting VM: (result){ [ 1205.675947] env[69927]: value = "vm-811582" [ 1205.675947] env[69927]: _type = "VirtualMachine" [ 1205.675947] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1205.675947] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the lease: (returnval){ [ 1205.675947] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528caaed-005f-5a59-6077-f4ef4ccf10d4" [ 1205.675947] env[69927]: _type = "HttpNfcLease" [ 1205.675947] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1205.682107] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1205.682107] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528caaed-005f-5a59-6077-f4ef4ccf10d4" [ 1205.682107] env[69927]: _type = "HttpNfcLease" [ 1205.682107] env[69927]: } is initializing. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1205.698265] env[69927]: DEBUG nova.scheduler.client.report [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1205.775211] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Releasing lock "refresh_cache-2a0a6870-47ad-4958-afed-bdbda3e54c21" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.775577] env[69927]: DEBUG nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Instance network_info: |[{"id": "43c0ab61-f8c2-4ed2-8aa0-effc43628918", "address": "fa:16:3e:ab:b3:52", "network": {"id": "f547c151-2ec8-4fc7-a16c-c8abb6cfd8b9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-921215762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73f101f61ff4315a9e407b784df06f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c0ab61-f8", "ovs_interfaceid": "43c0ab61-f8c2-4ed2-8aa0-effc43628918", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1205.775892] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] Acquired lock "refresh_cache-2a0a6870-47ad-4958-afed-bdbda3e54c21" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.776089] env[69927]: DEBUG nova.network.neutron [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Refreshing network info cache for port 43c0ab61-f8c2-4ed2-8aa0-effc43628918 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1205.777459] env[69927]: DEBUG nova.virt.vmwareapi.vmops 
[None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:b3:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32faf59b-014c-4f1f-8331-40df95bf741f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43c0ab61-f8c2-4ed2-8aa0-effc43628918', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1205.786025] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Creating folder: Project (c73f101f61ff4315a9e407b784df06f4). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1205.787009] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e979717-4b7c-4673-b0a6-f9a2e2e84c3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.799959] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Created folder: Project (c73f101f61ff4315a9e407b784df06f4) in parent group-v811283. [ 1205.800151] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Creating folder: Instances. Parent ref: group-v811583. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1205.800398] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78a90b80-5e78-4ea2-9fea-a77a823fe82c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.812984] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Created folder: Instances in parent group-v811583. [ 1205.813651] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1205.813651] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1205.814435] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57c86938-1049-4c76-9cdb-12941b4ce990 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.847602] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1205.847602] env[69927]: value = "task-4096780" [ 1205.847602] env[69927]: _type = "Task" [ 1205.847602] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.863947] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096780, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.865561] env[69927]: DEBUG nova.network.neutron [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1205.913312] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.913519] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.913658] env[69927]: DEBUG nova.network.neutron [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1206.040935] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6a04296f-5638-478e-9620-5a8c3110b5fc tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-d548ea75-9c1f-4884-b338-194f1b5d62ef-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.177s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.168208] env[69927]: DEBUG nova.compute.manager [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Received event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1206.168461] env[69927]: DEBUG 
nova.compute.manager [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing instance network info cache due to event network-changed-c1c89675-9c86-4cf9-9c34-fdea74b6cf04. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1206.168665] env[69927]: DEBUG oslo_concurrency.lockutils [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] Acquiring lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.183666] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1206.183666] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528caaed-005f-5a59-6077-f4ef4ccf10d4" [ 1206.183666] env[69927]: _type = "HttpNfcLease" [ 1206.183666] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1206.184247] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1206.184247] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528caaed-005f-5a59-6077-f4ef4ccf10d4" [ 1206.184247] env[69927]: _type = "HttpNfcLease" [ 1206.184247] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1206.184993] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3d8227-6a24-401f-8128-6bc10ea7061f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.193244] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52433eaf-80b6-7164-2506-aace7db5555d/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1206.193452] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52433eaf-80b6-7164-2506-aace7db5555d/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1206.250013] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.250556] env[69927]: DEBUG nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1206.297389] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-187d53a9-0faa-4613-89b7-de717100f980 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.358345] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096780, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.445877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.446716] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.509012] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.509652] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.599034] env[69927]: DEBUG nova.network.neutron [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Updated VIF entry in instance network info cache for port 43c0ab61-f8c2-4ed2-8aa0-effc43628918. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1206.599435] env[69927]: DEBUG nova.network.neutron [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Updating instance_info_cache with network_info: [{"id": "43c0ab61-f8c2-4ed2-8aa0-effc43628918", "address": "fa:16:3e:ab:b3:52", "network": {"id": "f547c151-2ec8-4fc7-a16c-c8abb6cfd8b9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-921215762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c73f101f61ff4315a9e407b784df06f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c0ab61-f8", "ovs_interfaceid": "43c0ab61-f8c2-4ed2-8aa0-effc43628918", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.755865] env[69927]: DEBUG nova.compute.utils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1206.757958] env[69927]: DEBUG nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1206.758343] env[69927]: DEBUG nova.network.neutron [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1206.766246] env[69927]: DEBUG nova.network.neutron [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance_info_cache with network_info: [{"id": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "address": "fa:16:3e:60:97:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ee960c-41", "ovs_interfaceid": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.775143] env[69927]: DEBUG nova.network.neutron [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.800142] env[69927]: DEBUG nova.policy [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76414b2ae1aa4ab582c2b59fd4218005', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544f2a021144492ba1aea46ce6075e53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1206.859131] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096780, 'name': CreateVM_Task, 'duration_secs': 0.592604} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.859411] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1206.860324] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.860559] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.861118] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1206.861391] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bad10d2-bc7e-4949-b48f-11b90d6da8f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.867237] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1206.867237] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52815314-37e0-1c6f-a817-aa63cb40df7a" [ 1206.867237] env[69927]: _type = "Task" [ 1206.867237] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.877219] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52815314-37e0-1c6f-a817-aa63cb40df7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.910584] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.910843] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.911141] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.911681] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.911987] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.918416] env[69927]: INFO nova.compute.manager [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Terminating instance [ 1206.950590] env[69927]: DEBUG nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1207.016461] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.016729] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.016908] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.017293] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1207.018518] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e929e3-cea7-4a19-8f08-59aa94ac33fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.031285] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8756cc35-22da-4a27-9ee2-8b1a87fcce90 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.049298] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2f71d6-5f32-4a14-b5c8-bf1ccdf44262 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.059570] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b00a7d3-bfa0-4f56-ba4d-ba42f89ce1a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.094969] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179625MB free_disk=16GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1207.095266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.095504] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.102121] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e528627-87a7-4896-9182-e624c0d7d567 req-219a89db-a323-4ab2-80db-e63c5e88f381 service nova] Releasing lock "refresh_cache-2a0a6870-47ad-4958-afed-bdbda3e54c21" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.262253] env[69927]: DEBUG nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1207.270393] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.281232] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Releasing lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.281879] env[69927]: DEBUG nova.compute.manager [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Inject network info {{(pid=69927) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1207.282173] env[69927]: DEBUG nova.compute.manager [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] network_info to inject: |[{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 
1207.287370] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Reconfiguring VM instance to set the machine id {{(pid=69927) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1207.288032] env[69927]: DEBUG oslo_concurrency.lockutils [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] Acquired lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.288231] env[69927]: DEBUG nova.network.neutron [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Refreshing network info cache for port c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1207.289936] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a191ad1d-1114-4524-acea-080ef6316593 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.309291] env[69927]: DEBUG oslo_vmware.api [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1207.309291] env[69927]: value = "task-4096781" [ 1207.309291] env[69927]: _type = "Task" [ 1207.309291] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.327125] env[69927]: DEBUG oslo_vmware.api [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096781, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.340308] env[69927]: DEBUG nova.network.neutron [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Successfully created port: 43121e87-0514-4fd2-a304-b280aae31175 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1207.380041] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52815314-37e0-1c6f-a817-aa63cb40df7a, 'name': SearchDatastore_Task, 'duration_secs': 0.019113} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.381577] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.381577] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1207.381577] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.381759] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.385175] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1207.385556] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6bff1dc-2b60-4adb-a7c2-d3e9f0cf51ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.402035] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1207.402564] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1207.403615] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ae25724-90c0-4b9e-9d4d-7265c68a1d12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.412643] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1207.412643] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52176c26-23ee-dd06-bee8-aa2fd08debe9" [ 1207.412643] env[69927]: _type = "Task" [ 1207.412643] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.426111] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52176c26-23ee-dd06-bee8-aa2fd08debe9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.426111] env[69927]: DEBUG nova.compute.manager [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1207.426111] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1207.426111] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4d1245-9ee6-4773-889c-2a3074532f35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.434343] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1207.434689] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa2cbc84-6e80-41fa-9f0d-f91b1234ba64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.443267] env[69927]: DEBUG oslo_vmware.api [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1207.443267] env[69927]: value = "task-4096782" [ 1207.443267] env[69927]: _type = "Task" [ 1207.443267] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.459398] env[69927]: DEBUG oslo_vmware.api [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.480426] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.809823] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5104a7-90e9-4d32-b609-983407e39a88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.842305] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-d548ea75-9c1f-4884-b338-194f1b5d62ef-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.842958] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-d548ea75-9c1f-4884-b338-194f1b5d62ef-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.845043] env[69927]: DEBUG oslo_vmware.api [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096781, 'name': ReconfigVM_Task, 'duration_secs': 0.211628} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.847401] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b9e783-2e0a-4a24-8883-facc9e6399ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.850751] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-6f0cb737-3c62-4bf0-878e-59e9867892fa tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Reconfigured VM instance to set the machine id {{(pid=69927) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1207.862188] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance '56aec5c2-d344-4a8d-a55a-930bc425150a' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1207.927310] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52176c26-23ee-dd06-bee8-aa2fd08debe9, 'name': SearchDatastore_Task, 'duration_secs': 0.015071} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.928184] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26868695-80a5-436a-9165-a325167fd5f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.935645] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1207.935645] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e7b26-f78b-2ef0-b515-ee37da610fee" [ 1207.935645] env[69927]: _type = "Task" [ 1207.935645] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.947278] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e7b26-f78b-2ef0-b515-ee37da610fee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.956751] env[69927]: DEBUG oslo_vmware.api [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096782, 'name': PowerOffVM_Task, 'duration_secs': 0.371983} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.957188] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1207.957440] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1207.957734] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-798d6333-57d0-4991-b1cf-8934b6789237 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.016050] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "7ff17f1d-31fd-440b-906c-2719770a9151" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.016368] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "7ff17f1d-31fd-440b-906c-2719770a9151" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.016657] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "7ff17f1d-31fd-440b-906c-2719770a9151-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.016934] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "7ff17f1d-31fd-440b-906c-2719770a9151-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.017218] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "7ff17f1d-31fd-440b-906c-2719770a9151-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.020442] env[69927]: INFO nova.compute.manager [None 
req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Terminating instance [ 1208.110510] env[69927]: DEBUG nova.network.neutron [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updated VIF entry in instance network info cache for port c1c89675-9c86-4cf9-9c34-fdea74b6cf04. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1208.110990] env[69927]: DEBUG nova.network.neutron [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [{"id": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "address": "fa:16:3e:71:4d:cf", "network": {"id": "dd75b393-aca1-4de7-8249-d8eec36d040d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1189166772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "999896abcbbd4ceea4fc2d898e025bc3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f85835c8-5d0c-4b2f-97c4-6c4006580f79", "external-id": "nsx-vlan-transportzone-245", "segmentation_id": 245, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c89675-9c", "ovs_interfaceid": "c1c89675-9c86-4cf9-9c34-fdea74b6cf04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.113886] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Applying migration context for instance 56aec5c2-d344-4a8d-a55a-930bc425150a as it has an incoming, in-progress migration d6355cd8-1fbd-4455-b64b-c3b173c51f88. Migration status is post-migrating {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1208.119350] env[69927]: INFO nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating resource usage from migration d6355cd8-1fbd-4455-b64b-c3b173c51f88 [ 1208.136281] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.136281] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 15c44d86-829f-4317-ab66-9e61d4fb4dd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.136435] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cff307ed-3c8b-4126-9749-1204597cbf6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.136475] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 693a6c6b-8d1c-405e-bb17-73259e28f556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.136564] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7ff17f1d-31fd-440b-906c-2719770a9151 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.136683] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance d548ea75-9c1f-4884-b338-194f1b5d62ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.136933] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance da468d11-82a4-4fec-b06a-1b522bacdbc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.137075] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.137196] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 5c87c74d-5998-4dfc-bc3c-c2887ff25195 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.137311] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.137423] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b007a697-7da4-4c97-9ccb-046d86b27568 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.137535] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 07814f60-1886-4b06-bcf7-e2c9b95a4501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.137736] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Migration d6355cd8-1fbd-4455-b64b-c3b173c51f88 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 1208.137840] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 56aec5c2-d344-4a8d-a55a-930bc425150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.137949] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 2a0a6870-47ad-4958-afed-bdbda3e54c21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.138082] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance e38222c4-3362-4d47-aee4-d26ccb4cbf3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1208.276832] env[69927]: DEBUG nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1208.308309] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1208.308565] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1208.308723] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1208.309349] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1208.309349] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1208.309349] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1208.309510] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1208.309582] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1208.309749] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 
tempest-ServersTestJSON-738065696-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1208.309915] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1208.310102] env[69927]: DEBUG nova.virt.hardware [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1208.310986] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d17d329-6490-44bf-9f0c-21f18041f243 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.319605] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4fddb2-179c-4513-8286-a68b74316559 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.355159] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.355396] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.358827] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3904541d-3431-4313-a720-b5b4e98ce548 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.379445] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1208.379881] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e270228-6801-4035-b006-827e98bb7f53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.382238] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef8475c-f801-48ad-a831-30038f274978 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.411615] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] 
Reconfiguring VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1208.414881] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b80ecdec-0601-46e9-babf-cfdff0cd8532 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.429679] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1208.429679] env[69927]: value = "task-4096784" [ 1208.429679] env[69927]: _type = "Task" [ 1208.429679] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.436468] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1208.436468] env[69927]: value = "task-4096785" [ 1208.436468] env[69927]: _type = "Task" [ 1208.436468] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.443127] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096784, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.454493] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525e7b26-f78b-2ef0-b515-ee37da610fee, 'name': SearchDatastore_Task, 'duration_secs': 0.021862} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.458119] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.458747] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 2a0a6870-47ad-4958-afed-bdbda3e54c21/2a0a6870-47ad-4958-afed-bdbda3e54c21.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1208.459147] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.459408] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28e8bb16-29e5-430b-a52b-f671f26e77f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.468327] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1208.468327] env[69927]: value = "task-4096786" [ 1208.468327] env[69927]: _type = "Task" [ 1208.468327] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.478171] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.526031] env[69927]: DEBUG nova.compute.manager [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1208.526031] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1208.527086] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c418ef8-655f-457c-84d9-1c8d67e4ab44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.535820] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1208.536147] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0147210c-4796-424d-9d28-e4e62efbd48d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.543452] env[69927]: DEBUG oslo_vmware.api [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1208.543452] env[69927]: value = "task-4096787" [ 1208.543452] env[69927]: _type = "Task" [ 1208.543452] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.552710] env[69927]: DEBUG oslo_vmware.api [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096787, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.620729] env[69927]: DEBUG oslo_concurrency.lockutils [req-f0414768-be37-4aca-9560-233505f684f9 req-d13bd1ce-da91-4225-9799-dd54fc7bfbf7 service nova] Releasing lock "refresh_cache-7ff17f1d-31fd-440b-906c-2719770a9151" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.641072] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance afdd23d0-c8e0-4d49-a188-525b6b3f31c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1208.641455] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1208.641657] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1208.947399] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096784, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.962669] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.984188] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096786, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.042643] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19713c8-da3b-4646-a08c-621ebb08ef4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.056852] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd0ec23-1f84-4f01-852d-85464891a75e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.063991] env[69927]: DEBUG oslo_vmware.api [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096787, 'name': PowerOffVM_Task, 'duration_secs': 0.235483} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.064867] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1209.065095] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1209.065437] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a5181c5-04ae-43e8-9e57-7a3422382d48 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.094788] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b955ba-45ae-43f8-af0b-ebdb6e2cf0a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.104208] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7a0f27-f3d2-415b-bce5-d61b82567e44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.122572] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.150276] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1209.150559] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] 
Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1209.151204] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Deleting the datastore file [datastore2] 7ff17f1d-31fd-440b-906c-2719770a9151 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.151204] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40b48c59-90dc-4a3a-86fe-2059e5f1a7b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.159034] env[69927]: DEBUG oslo_vmware.api [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for the task: (returnval){ [ 1209.159034] env[69927]: value = "task-4096789" [ 1209.159034] env[69927]: _type = "Task" [ 1209.159034] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.169732] env[69927]: DEBUG oslo_vmware.api [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096789, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.444903] env[69927]: DEBUG oslo_vmware.api [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096784, 'name': PowerOnVM_Task, 'duration_secs': 0.597548} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.445275] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1209.445564] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c201fac-050c-4d36-8498-67b339d26c24 tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance '56aec5c2-d344-4a8d-a55a-930bc425150a' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1209.460086] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.479857] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096786, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639291} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.483034] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 2a0a6870-47ad-4958-afed-bdbda3e54c21/2a0a6870-47ad-4958-afed-bdbda3e54c21.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1209.483034] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1209.483034] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed3558a7-e273-46ef-b6df-07ce19d01b91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.486166] env[69927]: DEBUG nova.compute.manager [req-5bc9406f-9cc8-4795-9c95-c7074ce0e4a2 req-b60a5106-8e95-469a-9f9f-e16892e612f8 service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Received event network-vif-plugged-43121e87-0514-4fd2-a304-b280aae31175 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1209.486276] env[69927]: DEBUG oslo_concurrency.lockutils [req-5bc9406f-9cc8-4795-9c95-c7074ce0e4a2 req-b60a5106-8e95-469a-9f9f-e16892e612f8 service nova] Acquiring lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.486473] env[69927]: DEBUG oslo_concurrency.lockutils [req-5bc9406f-9cc8-4795-9c95-c7074ce0e4a2 req-b60a5106-8e95-469a-9f9f-e16892e612f8 service nova] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.486632] env[69927]: DEBUG oslo_concurrency.lockutils [req-5bc9406f-9cc8-4795-9c95-c7074ce0e4a2 req-b60a5106-8e95-469a-9f9f-e16892e612f8 service nova] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.486843] env[69927]: DEBUG nova.compute.manager [req-5bc9406f-9cc8-4795-9c95-c7074ce0e4a2 req-b60a5106-8e95-469a-9f9f-e16892e612f8 service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] No waiting events found dispatching network-vif-plugged-43121e87-0514-4fd2-a304-b280aae31175 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1209.487101] env[69927]: WARNING nova.compute.manager [req-5bc9406f-9cc8-4795-9c95-c7074ce0e4a2 req-b60a5106-8e95-469a-9f9f-e16892e612f8 service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Received unexpected event network-vif-plugged-43121e87-0514-4fd2-a304-b280aae31175 
for instance with vm_state building and task_state spawning. [ 1209.489487] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1209.489487] env[69927]: value = "task-4096790" [ 1209.489487] env[69927]: _type = "Task" [ 1209.489487] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.501035] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.628291] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1209.642434] env[69927]: DEBUG nova.network.neutron [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Successfully updated port: 43121e87-0514-4fd2-a304-b280aae31175 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1209.670363] env[69927]: DEBUG oslo_vmware.api [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Task: {'id': task-4096789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205461} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.670986] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1209.671412] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1209.671761] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1209.672099] env[69927]: INFO nova.compute.manager [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1209.672521] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1209.672928] env[69927]: DEBUG nova.compute.manager [-] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1209.673205] env[69927]: DEBUG nova.network.neutron [-] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1209.967651] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.001793] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.207287} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.001793] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1210.002419] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09833a3-fa97-4155-8e03-ac641be851c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.029905] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 2a0a6870-47ad-4958-afed-bdbda3e54c21/2a0a6870-47ad-4958-afed-bdbda3e54c21.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1210.031458] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1363124f-4aab-4a38-aec8-527f4045fa44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.054398] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1210.054855] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1210.055186] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleting the datastore file [datastore1] cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.056152] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3a67352-3f7d-4044-b570-5f169cfd969b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.064813] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1210.064813] env[69927]: value = "task-4096791" [ 1210.064813] env[69927]: _type = "Task" [ 1210.064813] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.066559] env[69927]: DEBUG oslo_vmware.api [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for the task: (returnval){ [ 1210.066559] env[69927]: value = "task-4096792" [ 1210.066559] env[69927]: _type = "Task" [ 1210.066559] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.080664] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096791, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.083501] env[69927]: DEBUG oslo_vmware.api [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096792, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.132538] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1210.133095] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.037s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.133852] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.653s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.135508] env[69927]: INFO nova.compute.claims [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1210.143285] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "refresh_cache-e38222c4-3362-4d47-aee4-d26ccb4cbf3c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.143442] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "refresh_cache-e38222c4-3362-4d47-aee4-d26ccb4cbf3c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.143615] env[69927]: DEBUG nova.network.neutron [None 
req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1210.424946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.425265] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.425542] env[69927]: DEBUG nova.compute.manager [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1210.426665] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b5eaf5-c1f1-44a2-9583-e1dc8fdc92f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.436101] env[69927]: DEBUG nova.compute.manager [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1210.436756] env[69927]: DEBUG nova.objects.instance [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'flavor' on Instance uuid da468d11-82a4-4fec-b06a-1b522bacdbc2 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.466792] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.579958] env[69927]: DEBUG oslo_vmware.api [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Task: {'id': task-4096792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24522} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.586750] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.586994] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1210.587558] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1210.587829] env[69927]: INFO nova.compute.manager [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Took 3.16 seconds to destroy the instance on the hypervisor. [ 1210.588116] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1210.588722] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096791, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.588992] env[69927]: DEBUG nova.compute.manager [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1210.589112] env[69927]: DEBUG nova.network.neutron [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1210.680879] env[69927]: DEBUG nova.network.neutron [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1210.772918] env[69927]: DEBUG nova.network.neutron [-] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.845843] env[69927]: DEBUG nova.network.neutron [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Updating instance_info_cache with network_info: [{"id": "43121e87-0514-4fd2-a304-b280aae31175", "address": "fa:16:3e:26:34:9a", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43121e87-05", "ovs_interfaceid": "43121e87-0514-4fd2-a304-b280aae31175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.974190] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.077439] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096791, 'name': ReconfigVM_Task, 'duration_secs': 0.738218} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.077572] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 2a0a6870-47ad-4958-afed-bdbda3e54c21/2a0a6870-47ad-4958-afed-bdbda3e54c21.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1211.078311] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8426f8c1-e629-4a52-ac5c-ce73efee0b54 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.086888] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1211.086888] env[69927]: value = "task-4096793" [ 1211.086888] env[69927]: _type = "Task" [ 1211.086888] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.098560] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096793, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.275549] env[69927]: INFO nova.compute.manager [-] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Took 1.60 seconds to deallocate network for instance. 
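The task lifecycle traced above (Invoking VirtualMachine.ReconfigVM_Task / Rename_Task, "Waiting for the task ... to complete", "progress is N%", "completed successfully") is oslo.vmware's standard polling loop: wait_for_task (api.py:397) drives _poll_task (api.py:434) until vCenter reports the task finished. A minimal sketch of that pattern in Python, assuming an already created oslo_vmware VMwareAPISession and a VM managed-object reference; the helper name and the commented-out connection details are illustrative placeholders, not code from this deployment:

from oslo_vmware import api as vmware_api


def power_on_vm(session, vm_ref):
    # Start the asynchronous vCenter task; invoke_api() issues the SOAP call
    # (the "Invoking VirtualMachine.PowerOnVM_Task ..." style lines above)
    # and returns a Task managed-object reference immediately.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Block until the task completes; this is what produces the
    # "Waiting for the task ..." / "progress is N%" / "completed successfully"
    # messages, and it raises if the task ends in an error state.
    return session.wait_for_task(task_ref)


# Placeholder connection details; task_poll_interval controls how often the
# task is polled (and progress logged), api_retry_count how many times
# transient faults are retried.
# session = vmware_api.VMwareAPISession('vc.example.com', 'user', 'secret',
#                                       api_retry_count=10,
#                                       task_poll_interval=0.5)
# power_on_vm(session, vm_ref)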
[ 1211.350320] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "refresh_cache-e38222c4-3362-4d47-aee4-d26ccb4cbf3c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.350632] env[69927]: DEBUG nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Instance network_info: |[{"id": "43121e87-0514-4fd2-a304-b280aae31175", "address": "fa:16:3e:26:34:9a", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43121e87-05", "ovs_interfaceid": "43121e87-0514-4fd2-a304-b280aae31175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1211.351328] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:34:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43121e87-0514-4fd2-a304-b280aae31175', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1211.359362] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1211.359866] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "56aec5c2-d344-4a8d-a55a-930bc425150a" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.360105] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.360313] env[69927]: DEBUG nova.compute.manager [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Going to confirm migration 6 {{(pid=69927) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1211.361797] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1211.364602] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75aa028c-4991-4a49-bdad-8a4c9bad6349 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.391507] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1211.391507] env[69927]: value = "task-4096794" [ 1211.391507] env[69927]: _type = "Task" [ 1211.391507] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.407855] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096794, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.427635] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a8128c-2af1-4bea-87b6-668165bd3a55 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.436278] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bfe23a-08ca-4e71-87d1-f1c207fc7c78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.440272] env[69927]: DEBUG nova.network.neutron [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.471251] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1211.472718] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01612775-0d41-4bf4-b7fe-ae3333063a73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.477965] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b519455-41aa-47d7-8779-2025a8123867 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.485290] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.485476] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquired lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.485653] env[69927]: DEBUG nova.network.neutron [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1211.485840] env[69927]: DEBUG nova.objects.instance [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'info_cache' on Instance uuid 56aec5c2-d344-4a8d-a55a-930bc425150a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.493813] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.494074] env[69927]: DEBUG oslo_vmware.api [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1211.494074] env[69927]: value = "task-4096795" [ 1211.494074] env[69927]: _type = "Task" [ 1211.494074] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.495705] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037eac41-c215-484a-891f-5388141a2bcb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.507705] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.515867] env[69927]: DEBUG oslo_vmware.api [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.526306] env[69927]: DEBUG nova.compute.provider_tree [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.535059] env[69927]: DEBUG nova.compute.manager [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Received event network-changed-43121e87-0514-4fd2-a304-b280aae31175 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1211.535408] env[69927]: DEBUG nova.compute.manager [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Refreshing instance network info cache due to event network-changed-43121e87-0514-4fd2-a304-b280aae31175. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1211.535606] env[69927]: DEBUG oslo_concurrency.lockutils [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] Acquiring lock "refresh_cache-e38222c4-3362-4d47-aee4-d26ccb4cbf3c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.535786] env[69927]: DEBUG oslo_concurrency.lockutils [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] Acquired lock "refresh_cache-e38222c4-3362-4d47-aee4-d26ccb4cbf3c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.536041] env[69927]: DEBUG nova.network.neutron [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Refreshing network info cache for port 43121e87-0514-4fd2-a304-b280aae31175 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1211.597422] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096793, 'name': Rename_Task, 'duration_secs': 0.249586} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.598055] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1211.598055] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baa7c286-706a-42e0-9cd1-dc00782a1717 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.606164] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1211.606164] env[69927]: value = "task-4096796" [ 1211.606164] env[69927]: _type = "Task" [ 1211.606164] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.616199] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096796, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.786023] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.907938] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096794, 'name': CreateVM_Task, 'duration_secs': 0.459511} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.908194] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1211.909256] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.909512] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.909957] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1211.910345] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c48421e-ad89-4928-9c56-82085becbca5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.917506] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1211.917506] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cfc8a3-6c7c-d69a-a647-7d8b3244ad10" [ 1211.917506] env[69927]: _type = "Task" [ 1211.917506] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.929154] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cfc8a3-6c7c-d69a-a647-7d8b3244ad10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.942948] env[69927]: INFO nova.compute.manager [-] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Took 1.35 seconds to deallocate network for instance. [ 1211.986037] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.008549] env[69927]: DEBUG oslo_vmware.api [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096795, 'name': PowerOffVM_Task, 'duration_secs': 0.322004} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.008717] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1212.008760] env[69927]: DEBUG nova.compute.manager [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1212.009609] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f52ae2-717d-401e-afd8-4c9d7ec6d6ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.017063] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.017201] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Cleaning up deleted instances {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1212.028172] env[69927]: DEBUG nova.scheduler.client.report [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1212.118666] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096796, 'name': PowerOnVM_Task} progress is 
88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.343115] env[69927]: DEBUG nova.network.neutron [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Updated VIF entry in instance network info cache for port 43121e87-0514-4fd2-a304-b280aae31175. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.343459] env[69927]: DEBUG nova.network.neutron [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Updating instance_info_cache with network_info: [{"id": "43121e87-0514-4fd2-a304-b280aae31175", "address": "fa:16:3e:26:34:9a", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43121e87-05", "ovs_interfaceid": "43121e87-0514-4fd2-a304-b280aae31175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.434679] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52cfc8a3-6c7c-d69a-a647-7d8b3244ad10, 'name': SearchDatastore_Task, 'duration_secs': 0.010999} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.435169] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.435584] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1212.435997] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.436265] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.436590] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1212.437011] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae9a242d-9130-412b-9ce8-6fa089ace455 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.448184] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1212.448421] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1212.449243] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40190ba6-c354-4dc9-85b1-7d15c58da95c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.452443] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.456593] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1212.456593] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6ecd7-5a70-e18e-a5d0-f27d64745010" [ 1212.456593] env[69927]: _type = "Task" [ 1212.456593] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.466687] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6ecd7-5a70-e18e-a5d0-f27d64745010, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.482359] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.538551] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] There are 50 instances to clean {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1212.538777] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 6828dc80-2e0e-4715-a620-42edbe5eec2f] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1212.546346] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.546853] env[69927]: DEBUG nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1212.550502] env[69927]: DEBUG oslo_concurrency.lockutils [None req-50e6092a-78f9-4475-8df1-0cab44a89a7e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.125s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.551688] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.766s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.551938] env[69927]: DEBUG nova.objects.instance [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lazy-loading 'resources' on Instance uuid 7ff17f1d-31fd-440b-906c-2719770a9151 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.619025] env[69927]: DEBUG oslo_vmware.api [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096796, 'name': PowerOnVM_Task, 'duration_secs': 0.714567} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.619548] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1212.619923] env[69927]: INFO nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Took 9.41 seconds to spawn the instance on the hypervisor. 
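The lock bookkeeping in the entries above (Acquiring lock ... by, acquired by ... :: waited, "released" by ... :: held) is oslo.concurrency's standard trace: Nova wraps compute-manager callbacks such as do_stop_instance and resource-tracker methods such as instance_claim and update_usage in a synchronized decorator keyed on a lock name like "compute_resources". A small stand-in (not Nova code) showing where those messages come from; the function below and its arguments are hypothetical, only the lock name and the instance UUID are taken from the log:

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid, vcpu_delta):
    # The body runs with the in-process "compute_resources" lock held; the
    # decorator's inner() wrapper (lockutils.py:405/410/424 in the trace)
    # logs the acquire/release lines together with the waited/held timings.
    print('updating usage for %s by %d vCPU' % (instance_uuid, vcpu_delta))


update_usage('afdd23d0-c8e0-4d49-a188-525b6b3f31c8', 1)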
[ 1212.621143] env[69927]: DEBUG nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1212.621429] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7dccd3-9a2d-436d-8968-60b675ed25a5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.757829] env[69927]: DEBUG nova.network.neutron [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance_info_cache with network_info: [{"id": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "address": "fa:16:3e:60:97:a0", "network": {"id": "2cac2664-2b1d-4bb6-a58a-f8e96679d038", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1945522269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3afde63c8cbe4aecb32a470fd6b948f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ee960c-41", "ovs_interfaceid": "75ee960c-41d4-4858-8b1e-8198b77eb0d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.846284] env[69927]: DEBUG oslo_concurrency.lockutils [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] Releasing lock "refresh_cache-e38222c4-3362-4d47-aee4-d26ccb4cbf3c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.846614] env[69927]: DEBUG nova.compute.manager [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Received event network-vif-deleted-c1c89675-9c86-4cf9-9c34-fdea74b6cf04 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1212.846811] env[69927]: DEBUG nova.compute.manager [req-e4d629a4-5bfa-46f1-87d5-dd6959aa20c2 req-0822cdbb-8c2c-4886-8eb1-3a5687c5d6ba service nova] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Received event network-vif-deleted-eb2105ba-0276-4bc6-a2af-933090d4cdcd {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1212.896526] env[69927]: DEBUG nova.objects.instance [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'flavor' on Instance uuid da468d11-82a4-4fec-b06a-1b522bacdbc2 {{(pid=69927) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.967798] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6ecd7-5a70-e18e-a5d0-f27d64745010, 'name': SearchDatastore_Task, 'duration_secs': 0.011669} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.968729] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0350cacf-1f2b-4aeb-b015-850d722a7b2d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.978058] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1212.978058] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52eb8e1b-5d44-c3c2-d7ed-9af917d82a01" [ 1212.978058] env[69927]: _type = "Task" [ 1212.978058] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.985036] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.990385] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52eb8e1b-5d44-c3c2-d7ed-9af917d82a01, 'name': SearchDatastore_Task, 'duration_secs': 0.010183} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.990695] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.990883] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e38222c4-3362-4d47-aee4-d26ccb4cbf3c/e38222c4-3362-4d47-aee4-d26ccb4cbf3c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1212.991150] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c2ed40d-3522-41f8-8133-a12e27623408 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.997897] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1212.997897] env[69927]: value = "task-4096797" [ 1212.997897] env[69927]: _type = "Task" [ 1212.997897] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.007951] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096797, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.043923] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: ba7989a1-a644-4eb7-bf65-20ca1810dd62] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1213.055536] env[69927]: DEBUG nova.compute.utils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1213.059866] env[69927]: DEBUG nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1213.060098] env[69927]: DEBUG nova.network.neutron [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1213.145222] env[69927]: INFO nova.compute.manager [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Took 15.40 seconds to build instance. [ 1213.148836] env[69927]: DEBUG nova.policy [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '181ec10b2e4b4f1794294d18313a5918', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71a47794e5824701925ad4bdc3651196', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1213.261053] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Releasing lock "refresh_cache-56aec5c2-d344-4a8d-a55a-930bc425150a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.261347] env[69927]: DEBUG nova.objects.instance [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lazy-loading 'migration_context' on Instance uuid 56aec5c2-d344-4a8d-a55a-930bc425150a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.346236] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78061fc1-0251-4828-b87f-41a1b78b6967 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.356696] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ad63a3-9dd1-4ecd-ba0f-4e362ad63720 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.390842] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220db139-6186-4d1f-88e6-21c44d54e59b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.403333] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aa07b2-6d27-4992-b78e-333c42fc6170 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.408795] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock 
"refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.409154] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.409546] env[69927]: DEBUG nova.network.neutron [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1213.409788] env[69927]: DEBUG nova.objects.instance [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'info_cache' on Instance uuid da468d11-82a4-4fec-b06a-1b522bacdbc2 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.424474] env[69927]: DEBUG nova.compute.provider_tree [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.485451] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.510487] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096797, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488816} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.510811] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] e38222c4-3362-4d47-aee4-d26ccb4cbf3c/e38222c4-3362-4d47-aee4-d26ccb4cbf3c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1213.511087] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1213.511462] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d06d67b-2ab6-4402-ab25-f1acf1fe7d6c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.519683] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1213.519683] env[69927]: value = "task-4096798" [ 1213.519683] env[69927]: _type = "Task" [ 1213.519683] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.531022] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096798, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.546992] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 2c0c2704-1ccb-4e1f-95e9-62e44b751cc1] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1213.563566] env[69927]: DEBUG nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1213.647145] env[69927]: DEBUG oslo_concurrency.lockutils [None req-56b4a4b2-b8f4-457f-bda5-95654b4bc149 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.915s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.669722] env[69927]: DEBUG nova.network.neutron [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Successfully created port: e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1213.765127] env[69927]: DEBUG nova.objects.base [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Object Instance<56aec5c2-d344-4a8d-a55a-930bc425150a> lazy-loaded attributes: info_cache,migration_context {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1213.767398] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ffc590-0cdb-43d6-8d52-ccbf96c64bc7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.791655] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3090e01-ea3e-4072-be79-e240f705c25f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.798479] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1213.798479] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5247cef8-f927-1350-e6b8-473304589db4" [ 1213.798479] env[69927]: _type = "Task" [ 1213.798479] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.809841] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5247cef8-f927-1350-e6b8-473304589db4, 'name': SearchDatastore_Task, 'duration_secs': 0.008745} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.810174] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.914177] env[69927]: DEBUG nova.objects.base [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1213.927740] env[69927]: DEBUG nova.scheduler.client.report [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.985664] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.029502] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096798, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079002} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.029934] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1214.030564] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9ac0f4-0cba-45ee-9ea4-a9b8dd89776e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.051269] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 1b22fbb0-8628-4c69-b9b4-d6d294c7458b] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1214.061983] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] e38222c4-3362-4d47-aee4-d26ccb4cbf3c/e38222c4-3362-4d47-aee4-d26ccb4cbf3c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1214.062590] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee6c1c00-d73b-4fb2-a798-43c5e6e0e3f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.087818] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1214.087818] env[69927]: value = "task-4096799" [ 1214.087818] env[69927]: _type = "Task" [ 1214.087818] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.099030] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096799, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.099434] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "2a0a6870-47ad-4958-afed-bdbda3e54c21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.099649] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.099830] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "2a0a6870-47ad-4958-afed-bdbda3e54c21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.100040] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.100215] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.102358] env[69927]: INFO nova.compute.manager [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Terminating instance [ 1214.432654] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.435396] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 
1.983s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.435573] env[69927]: DEBUG nova.objects.instance [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lazy-loading 'resources' on Instance uuid cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.460795] env[69927]: INFO nova.scheduler.client.report [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Deleted allocations for instance 7ff17f1d-31fd-440b-906c-2719770a9151 [ 1214.487633] env[69927]: DEBUG oslo_vmware.api [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096785, 'name': ReconfigVM_Task, 'duration_secs': 5.803438} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.487886] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.488245] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Reconfigured VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1214.563508] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: a9ad5c39-49ed-4061-a48d-1e2ca9a8d5d3] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1214.583260] env[69927]: DEBUG nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1214.608527] env[69927]: DEBUG nova.compute.manager [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1214.609027] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1214.609569] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096799, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.613634] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabf424c-6955-4fc4-a2ce-6954cf6760ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.626193] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1214.629705] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1214.630235] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1214.630580] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.630958] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1214.631321] env[69927]: DEBUG nova.virt.hardware [None 
req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.632115] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1214.632115] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1214.632383] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1214.632798] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1214.633272] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1214.633580] env[69927]: DEBUG nova.virt.hardware [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1214.634120] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2471f025-0c9d-4b61-a0c6-2127212b4ac9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.639386] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29c892f-6a3d-4ba7-839e-04ceecccf6aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.659774] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1eea370-90f8-4cb3-9083-98cd275d36f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.664787] env[69927]: DEBUG oslo_vmware.api [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1214.664787] env[69927]: value = "task-4096800" [ 1214.664787] env[69927]: _type = "Task" 
[ 1214.664787] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.685346] env[69927]: DEBUG oslo_vmware.api [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.836361] env[69927]: DEBUG nova.network.neutron [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.972692] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e876f092-ee41-4c38-a1ec-b25fe68c3e2e tempest-AttachInterfacesUnderV243Test-1114553105 tempest-AttachInterfacesUnderV243Test-1114553105-project-member] Lock "7ff17f1d-31fd-440b-906c-2719770a9151" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.956s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.071043] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: b422d5c9-f580-4d07-9d13-af307571bf48] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1215.109418] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096799, 'name': ReconfigVM_Task, 'duration_secs': 0.69794} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.109736] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Reconfigured VM instance instance-0000006e to attach disk [datastore1] e38222c4-3362-4d47-aee4-d26ccb4cbf3c/e38222c4-3362-4d47-aee4-d26ccb4cbf3c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1215.111161] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdd32f94-c8bd-4c1f-ae55-82c17ed3598e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.121907] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1215.121907] env[69927]: value = "task-4096801" [ 1215.121907] env[69927]: _type = "Task" [ 1215.121907] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.134340] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096801, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.178627] env[69927]: DEBUG oslo_vmware.api [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096800, 'name': PowerOffVM_Task, 'duration_secs': 0.253346} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.178942] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1215.179163] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1215.180324] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cc2eb3f-83ce-4a19-b048-3bd1d14f3ee4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.218085] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c4e1e7-a5f6-4775-ae02-8472e93ac6d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.229289] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b518f778-3a63-4541-9f7c-bb0bbf67eae3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.270753] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6ce608-aa15-416a-9abf-91baf45e9261 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.274863] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1215.275195] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1215.275461] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Deleting the datastore file [datastore1] 2a0a6870-47ad-4958-afed-bdbda3e54c21 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1215.275779] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42ab6ebf-82bf-4746-9108-81572602d297 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.285924] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e6a617-8c90-418f-b86d-d4a507beaadd 
{{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.290859] env[69927]: DEBUG oslo_vmware.api [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for the task: (returnval){ [ 1215.290859] env[69927]: value = "task-4096803" [ 1215.290859] env[69927]: _type = "Task" [ 1215.290859] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.305480] env[69927]: DEBUG nova.compute.provider_tree [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.316183] env[69927]: DEBUG oslo_vmware.api [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096803, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.339655] env[69927]: DEBUG oslo_concurrency.lockutils [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.434020] env[69927]: DEBUG nova.network.neutron [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Successfully updated port: e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1215.481942] env[69927]: DEBUG nova.compute.manager [req-1e9d0d98-3bef-4ddb-8e31-601383054343 req-5ffe680b-8677-440a-89ff-c778697e931c service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Received event network-vif-plugged-e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.482199] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e9d0d98-3bef-4ddb-8e31-601383054343 req-5ffe680b-8677-440a-89ff-c778697e931c service nova] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.482355] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e9d0d98-3bef-4ddb-8e31-601383054343 req-5ffe680b-8677-440a-89ff-c778697e931c service nova] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.482522] env[69927]: DEBUG oslo_concurrency.lockutils [req-1e9d0d98-3bef-4ddb-8e31-601383054343 req-5ffe680b-8677-440a-89ff-c778697e931c service nova] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.482691] env[69927]: DEBUG nova.compute.manager [req-1e9d0d98-3bef-4ddb-8e31-601383054343 req-5ffe680b-8677-440a-89ff-c778697e931c service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] No waiting events found dispatching network-vif-plugged-e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1215.482870] env[69927]: WARNING nova.compute.manager [req-1e9d0d98-3bef-4ddb-8e31-601383054343 req-5ffe680b-8677-440a-89ff-c778697e931c service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Received unexpected event network-vif-plugged-e257b275-32fb-40b6-be25-78208eb9442e for instance with vm_state building and task_state spawning. [ 1215.575081] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 406828cc-c6aa-4686-827d-c7c8e28ffb8e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1215.631710] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096801, 'name': Rename_Task, 'duration_secs': 0.218669} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.632018] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1215.632297] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9177aef8-cc98-4f70-8c50-ce49755f82d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.640493] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1215.640493] env[69927]: value = "task-4096804" [ 1215.640493] env[69927]: _type = "Task" [ 1215.640493] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.655585] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.803076] env[69927]: DEBUG oslo_vmware.api [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Task: {'id': task-4096803, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17453} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.803470] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.803534] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1215.803693] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1215.803938] env[69927]: INFO nova.compute.manager [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1215.804251] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1215.804453] env[69927]: DEBUG nova.compute.manager [-] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1215.804546] env[69927]: DEBUG nova.network.neutron [-] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1215.812587] env[69927]: DEBUG nova.scheduler.client.report [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.940397] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.940572] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.940708] env[69927]: DEBUG nova.network.neutron [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1215.977093] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.977301] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.977478] env[69927]: DEBUG nova.network.neutron [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Building network info cache for instance {{(pid=69927) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1216.078330] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: d9347f31-b908-4561-9b57-1ea79b762168] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1216.151646] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096804, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.318139] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.321913] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.512s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.349661] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1216.349661] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-704bc2a8-f5b2-4d53-888f-91d4cc3e7550 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.360421] env[69927]: DEBUG oslo_vmware.api [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1216.360421] env[69927]: value = "task-4096805" [ 1216.360421] env[69927]: _type = "Task" [ 1216.360421] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.373031] env[69927]: DEBUG oslo_vmware.api [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096805, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.374946] env[69927]: INFO nova.scheduler.client.report [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Deleted allocations for instance cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a [ 1216.496257] env[69927]: DEBUG nova.network.neutron [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1216.582614] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: cba314de-644e-451e-8ecc-2e209d74bbce] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1216.655547] env[69927]: DEBUG oslo_vmware.api [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096804, 'name': PowerOnVM_Task, 'duration_secs': 0.972699} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.655836] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1216.656097] env[69927]: INFO nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Took 8.38 seconds to spawn the instance on the hypervisor. [ 1216.659253] env[69927]: DEBUG nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1216.659253] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1f35ea-d2c3-4247-a145-9d931014d5c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.849820] env[69927]: INFO nova.network.neutron [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Port 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1216.850238] env[69927]: DEBUG nova.network.neutron [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.858114] env[69927]: DEBUG nova.network.neutron [-] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.879575] env[69927]: DEBUG oslo_vmware.api [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096805, 'name': PowerOnVM_Task, 'duration_secs': 0.509583} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.879575] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1216.879575] env[69927]: DEBUG nova.compute.manager [None req-59bcfea7-9d57-4e5c-bcf4-e08dd8a592c8 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1216.879575] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb59d6c-9899-4231-a2e1-f23bb870c910 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.888663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7800d223-5153-458e-9b29-5886143771ca tempest-ServersNegativeTestJSON-1512944231 tempest-ServersNegativeTestJSON-1512944231-project-member] Lock "cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.978s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.068557] env[69927]: DEBUG nova.network.neutron [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updating instance_info_cache with network_info: [{"id": "e257b275-32fb-40b6-be25-78208eb9442e", "address": "fa:16:3e:de:10:f7", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape257b275-32", "ovs_interfaceid": "e257b275-32fb-40b6-be25-78208eb9442e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.086352] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 9aa0a285-66e4-4792-bbe9-a62f76666ec6] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1217.123095] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b33298f5-8e48-4dbd-a2ae-320ee6d71699 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.131798] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f48a42-c43e-4bf7-8e48-10162d5e5894 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.194388] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f025afdd-843d-41dd-b35d-e9d8d16d6d12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.210285] env[69927]: INFO nova.compute.manager [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Took 13.43 seconds to build instance. [ 1217.224515] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb63ea58-78ae-4d27-9376-daa428d7337c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.249330] env[69927]: DEBUG nova.compute.provider_tree [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.337718] env[69927]: DEBUG nova.compute.manager [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1217.337909] env[69927]: DEBUG nova.compute.manager [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing instance network info cache due to event network-changed-b92f830b-5eef-4260-a56b-94af4a4ec679. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1217.338228] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] Acquiring lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.360381] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.362583] env[69927]: INFO nova.compute.manager [-] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Took 1.56 seconds to deallocate network for instance. 
[ 1217.362884] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] Acquired lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1217.363081] env[69927]: DEBUG nova.network.neutron [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Refreshing network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1217.577236] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.577236] env[69927]: DEBUG nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Instance network_info: |[{"id": "e257b275-32fb-40b6-be25-78208eb9442e", "address": "fa:16:3e:de:10:f7", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape257b275-32", "ovs_interfaceid": "e257b275-32fb-40b6-be25-78208eb9442e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1217.577236] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:10:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7b7edd0-124a-48ec-ae26-1aa14f9b884a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e257b275-32fb-40b6-be25-78208eb9442e', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1217.585971] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1217.587684] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1217.588334] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e0802ea-9ede-4545-adda-0f2b35013524 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.606696] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 2ae5fcf7-3111-4e80-80b0-f9c1cece1001] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1217.614870] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1217.614870] env[69927]: value = "task-4096806" [ 1217.614870] env[69927]: _type = "Task" [ 1217.614870] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.626508] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096806, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.722818] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffa0a9f1-94a3-4dca-aaa1-d369cb8f6285 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.958s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.756675] env[69927]: DEBUG nova.scheduler.client.report [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.870067] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a68f93db-e130-4a2d-a8f0-5e0984dc68f9 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-d548ea75-9c1f-4884-b338-194f1b5d62ef-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.027s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.871666] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.986609] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52433eaf-80b6-7164-2506-aace7db5555d/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1217.987756] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973bc480-a988-4aab-8564-dd9be374b386 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.994411] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52433eaf-80b6-7164-2506-aace7db5555d/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1217.994590] env[69927]: ERROR oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52433eaf-80b6-7164-2506-aace7db5555d/disk-0.vmdk due to incomplete transfer. [ 1217.994831] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b6bf4e0d-a74e-4a0a-8dba-b360a6e14eb9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.005178] env[69927]: DEBUG oslo_vmware.rw_handles [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52433eaf-80b6-7164-2506-aace7db5555d/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1218.005487] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Uploaded image 87cd0321-c9d5-427e-8af6-c3bd78649765 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1218.007970] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1218.008279] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5bf624e2-e6d7-4b10-baac-d5032480783a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.015363] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1218.015363] env[69927]: value = "task-4096807" [ 1218.015363] env[69927]: _type = "Task" [ 1218.015363] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.025021] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096807, 'name': Destroy_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.072230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-5c87c74d-5998-4dfc-bc3c-c2887ff25195-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.072609] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-5c87c74d-5998-4dfc-bc3c-c2887ff25195-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.073166] env[69927]: DEBUG nova.objects.instance [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'flavor' on Instance uuid 5c87c74d-5998-4dfc-bc3c-c2887ff25195 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.109504] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 0833b39e-2aad-4f9d-973d-2ef1ae0ca6f5] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1218.112598] env[69927]: DEBUG nova.network.neutron [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updated VIF entry in instance network info cache for port b92f830b-5eef-4260-a56b-94af4a4ec679. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1218.113222] env[69927]: DEBUG nova.network.neutron [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [{"id": "b92f830b-5eef-4260-a56b-94af4a4ec679", "address": "fa:16:3e:a4:e7:1b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb92f830b-5e", "ovs_interfaceid": "b92f830b-5eef-4260-a56b-94af4a4ec679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.131677] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096806, 'name': CreateVM_Task, 'duration_secs': 0.394193} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.131890] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1218.132633] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.132921] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.133157] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1218.134270] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c4b6803-d5a1-45a4-9aad-005eeebe87cc {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.142637] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1218.142637] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241ecca-3074-9cac-c99a-94846958d7f3" [ 1218.142637] env[69927]: _type = "Task" [ 1218.142637] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.151921] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241ecca-3074-9cac-c99a-94846958d7f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.526311] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096807, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.620742] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5307b28-ba93-46b7-aec1-b81bbadf0e08 req-60a5d71a-0a6e-45f2-826b-55822d69b28d service nova] Releasing lock "refresh_cache-d548ea75-9c1f-4884-b338-194f1b5d62ef" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.621302] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: dd4c3963-aa58-49f2-b675-9863ff13bddf] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1218.656262] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241ecca-3074-9cac-c99a-94846958d7f3, 'name': SearchDatastore_Task, 'duration_secs': 0.010512} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.656347] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.656602] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1218.658000] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.658000] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.658000] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1218.658000] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43222e7a-16aa-44bf-a18f-df4ea904f368 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.671843] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1218.671970] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1218.672759] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26085ae6-1d71-4dc9-a02e-26d66ea313c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.680626] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1218.680626] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526f3ec6-c992-5479-95f4-0da24970f999" [ 1218.680626] env[69927]: _type = "Task" [ 1218.680626] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.693805] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526f3ec6-c992-5479-95f4-0da24970f999, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.727265] env[69927]: DEBUG nova.objects.instance [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'pci_requests' on Instance uuid 5c87c74d-5998-4dfc-bc3c-c2887ff25195 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.768496] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.446s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.771846] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.900s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.772110] env[69927]: DEBUG nova.objects.instance [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lazy-loading 'resources' on Instance uuid 2a0a6870-47ad-4958-afed-bdbda3e54c21 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.027993] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096807, 'name': Destroy_Task, 'duration_secs': 0.663194} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.028293] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Destroyed the VM [ 1219.028554] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1219.028901] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-05be6c87-aaf5-478d-97ff-22a79b3856b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.035844] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1219.035844] env[69927]: value = "task-4096808" [ 1219.035844] env[69927]: _type = "Task" [ 1219.035844] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.045821] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096808, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.124239] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c6f166c7-538f-4c8a-9500-48319c694ea0] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1219.192310] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526f3ec6-c992-5479-95f4-0da24970f999, 'name': SearchDatastore_Task, 'duration_secs': 0.016554} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.193208] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf33759c-23cd-4797-b81c-f9944ae559b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.198986] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1219.198986] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529baaac-5858-7783-b875-0786af983d70" [ 1219.198986] env[69927]: _type = "Task" [ 1219.198986] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.208071] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529baaac-5858-7783-b875-0786af983d70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.230534] env[69927]: DEBUG nova.objects.base [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Object Instance<5c87c74d-5998-4dfc-bc3c-c2887ff25195> lazy-loaded attributes: flavor,pci_requests {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1219.230753] env[69927]: DEBUG nova.network.neutron [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1219.321573] env[69927]: DEBUG nova.policy [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ed20f23b4104e2ea75ea29b804c79d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed984d7170742eca7e89bf3bf45e6ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1219.339380] env[69927]: INFO nova.scheduler.client.report [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted allocation for migration d6355cd8-1fbd-4455-b64b-c3b173c51f88 [ 1219.513765] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158d78e6-ff09-4080-b738-3d74a34b3400 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.524398] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f52f69a-8278-487e-8a8e-c86e94174cea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.562352] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5540192-0500-434d-9e44-5f30f47708e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.570546] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096808, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.573899] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe8fc1e-4c30-4d21-be18-cb138527b164 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.590718] env[69927]: DEBUG nova.compute.provider_tree [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.627958] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 0f5643d4-52f3-4cba-b71b-9c4370175e35] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1219.710461] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529baaac-5858-7783-b875-0786af983d70, 'name': SearchDatastore_Task, 'duration_secs': 0.013786} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.710774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.710949] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] afdd23d0-c8e0-4d49-a188-525b6b3f31c8/afdd23d0-c8e0-4d49-a188-525b6b3f31c8.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1219.711294] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4eec16d1-6f7c-4729-a9aa-2d8e1cd694a4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.719621] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1219.719621] env[69927]: value = "task-4096809" [ 1219.719621] env[69927]: _type = "Task" [ 1219.719621] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.728522] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096809, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.846937] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.487s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.078150] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096808, 'name': RemoveSnapshot_Task, 'duration_secs': 0.635542} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.079033] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1220.079281] env[69927]: DEBUG nova.compute.manager [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1220.080897] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1584fe-5371-49b1-94e0-4930e379920b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.095843] env[69927]: DEBUG nova.scheduler.client.report [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1220.131705] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 20ac32b7-51fc-40bf-a667-2aeb6c8c7648] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1220.233457] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096809, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.603051] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.605252] env[69927]: INFO nova.compute.manager [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Shelve offloading [ 1220.627651] env[69927]: INFO nova.scheduler.client.report [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Deleted allocations for instance 2a0a6870-47ad-4958-afed-bdbda3e54c21 [ 1220.635910] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 3936a3db-4afa-4a37-9d63-8c18b6b72c72] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1220.731717] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096809, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546229} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.731881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] afdd23d0-c8e0-4d49-a188-525b6b3f31c8/afdd23d0-c8e0-4d49-a188-525b6b3f31c8.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1220.731975] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1220.732253] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b1872d6-4759-496d-a707-eaa67f23a0fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.741083] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1220.741083] env[69927]: value = "task-4096810" [ 1220.741083] env[69927]: _type = "Task" [ 1220.741083] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.748450] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096810, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.791136] env[69927]: DEBUG nova.compute.manager [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Received event network-changed-e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1220.791327] env[69927]: DEBUG nova.compute.manager [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Refreshing instance network info cache due to event network-changed-e257b275-32fb-40b6-be25-78208eb9442e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1220.791567] env[69927]: DEBUG oslo_concurrency.lockutils [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] Acquiring lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.791728] env[69927]: DEBUG oslo_concurrency.lockutils [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] Acquired lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.791904] env[69927]: DEBUG nova.network.neutron [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Refreshing network info cache for port e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1220.882411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "56aec5c2-d344-4a8d-a55a-930bc425150a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.887338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.887338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.887338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.887338] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.888822] env[69927]: INFO nova.compute.manager [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Terminating instance [ 1220.996663] env[69927]: DEBUG nova.network.neutron [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Successfully updated port: 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1221.109446] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.109797] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab8cd01b-e5bf-439f-bd85-380faad74882 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.118171] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1221.118171] env[69927]: value = "task-4096811" [ 1221.118171] env[69927]: _type = "Task" [ 1221.118171] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.130516] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1221.131707] env[69927]: DEBUG nova.compute.manager [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1221.136300] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f8edf3-63c8-42f9-8c50-ab738ecfc70b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.142227] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8b336652-1577-47ea-bd4d-658b4261fb24 tempest-InstanceActionsNegativeTestJSON-2118782426 tempest-InstanceActionsNegativeTestJSON-2118782426-project-member] Lock "2a0a6870-47ad-4958-afed-bdbda3e54c21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.042s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.146534] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: b750ce2c-ee85-46c6-bf12-edb3f088e6de] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1221.153360] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.153536] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.153708] env[69927]: DEBUG nova.network.neutron [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1221.252317] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077919} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.252696] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1221.253822] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef16dcc7-208b-4d4f-b5e4-84e6c215b57d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.278470] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] afdd23d0-c8e0-4d49-a188-525b6b3f31c8/afdd23d0-c8e0-4d49-a188-525b6b3f31c8.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1221.278783] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a0a1734-c123-48b3-afcf-c88573a411ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.298290] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1221.298290] env[69927]: value = "task-4096812" [ 1221.298290] env[69927]: _type = "Task" [ 1221.298290] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.306900] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096812, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.395035] env[69927]: DEBUG nova.compute.manager [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.395341] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.396410] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbe9913-eb7a-4df3-82b7-d6760d361db1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.405491] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.405859] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b1e041f-efa2-4f77-a6b1-3d6f3487ec4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.419115] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1221.419115] env[69927]: value = "task-4096813" [ 1221.419115] env[69927]: _type = "Task" [ 1221.419115] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.431033] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096813, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.504227] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.504567] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.504887] env[69927]: DEBUG nova.network.neutron [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1221.604029] env[69927]: DEBUG nova.network.neutron [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updated VIF entry in instance network info cache for port e257b275-32fb-40b6-be25-78208eb9442e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1221.604486] env[69927]: DEBUG nova.network.neutron [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updating instance_info_cache with network_info: [{"id": "e257b275-32fb-40b6-be25-78208eb9442e", "address": "fa:16:3e:de:10:f7", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape257b275-32", "ovs_interfaceid": "e257b275-32fb-40b6-be25-78208eb9442e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.650216] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: ff8b87a7-042d-4867-b4f4-cbe0fa1cb9d2] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1221.664671] env[69927]: DEBUG nova.compute.manager [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e 
req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1221.664899] env[69927]: DEBUG nova.compute.manager [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing instance network info cache due to event network-changed-a54251d6-cc17-4c26-95aa-a11a00c08e5e. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1221.665173] env[69927]: DEBUG oslo_concurrency.lockutils [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.808582] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096812, 'name': ReconfigVM_Task, 'duration_secs': 0.317058} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.809193] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Reconfigured VM instance instance-0000006f to attach disk [datastore2] afdd23d0-c8e0-4d49-a188-525b6b3f31c8/afdd23d0-c8e0-4d49-a188-525b6b3f31c8.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1221.809554] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6476cfd-03a5-488e-968d-9127e7072849 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.817574] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1221.817574] env[69927]: value = "task-4096814" [ 1221.817574] env[69927]: _type = "Task" [ 1221.817574] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.827652] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096814, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.931115] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096813, 'name': PowerOffVM_Task, 'duration_secs': 0.388887} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.931115] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1221.931235] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1221.931633] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bdea418-ba36-4382-abfd-363995639dd5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.018611] env[69927]: DEBUG nova.network.neutron [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd201dadc-ab", "ovs_interfaceid": "d201dadc-ab89-4ede-8c29-41217e3af341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.060564] env[69927]: WARNING nova.network.neutron [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] b8b342c3-e0d7-4186-9541-03e865142f8a already exists in list: networks containing: ['b8b342c3-e0d7-4186-9541-03e865142f8a']. 
ignoring it [ 1222.112019] env[69927]: DEBUG oslo_concurrency.lockutils [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] Releasing lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.112019] env[69927]: DEBUG nova.compute.manager [req-bbd9cb5c-7a1f-4b42-8706-37e4efee0af5 req-b7082624-c2e4-4017-8e51-5fc3a7587d46 service nova] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Received event network-vif-deleted-43c0ab61-f8c2-4ed2-8aa0-effc43628918 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1222.155859] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 0c8e43a3-3f33-4a41-81d3-a98565dca4a7] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1222.157587] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.157801] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.158049] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleting the datastore file [datastore1] 56aec5c2-d344-4a8d-a55a-930bc425150a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.158355] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d45be0f-c0b9-41a8-9736-29b4204721c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.166928] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for the task: (returnval){ [ 1222.166928] env[69927]: value = "task-4096816" [ 1222.166928] env[69927]: _type = "Task" [ 1222.166928] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.180512] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096816, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.294779] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.294961] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.330067] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096814, 'name': Rename_Task, 'duration_secs': 0.148089} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.330378] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1222.330627] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b09d575-5c7c-4c52-8d6a-2ad5d669ed1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.339214] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1222.339214] env[69927]: value = "task-4096817" [ 1222.339214] env[69927]: _type = "Task" [ 1222.339214] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.348416] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096817, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.472494] env[69927]: DEBUG nova.network.neutron [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "address": "fa:16:3e:32:88:2b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70d5cdb3-06", "ovs_interfaceid": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.525266] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.658363] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 93d19a66-f00e-4fa8-9eed-32035b020ba2] Instance has had 0 of 5 cleanup attempts {{(pid=69927) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1222.681815] env[69927]: DEBUG oslo_vmware.api [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Task: {'id': task-4096816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145245} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.682181] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.682370] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.682594] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.683849] env[69927]: INFO nova.compute.manager [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1222.684181] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1222.684575] env[69927]: DEBUG nova.compute.manager [-] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1222.684575] env[69927]: DEBUG nova.network.neutron [-] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1222.798609] env[69927]: DEBUG nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1222.863296] env[69927]: DEBUG oslo_vmware.api [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096817, 'name': PowerOnVM_Task, 'duration_secs': 0.501247} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.863571] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1222.863775] env[69927]: INFO nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Took 8.28 seconds to spawn the instance on the hypervisor. [ 1222.863951] env[69927]: DEBUG nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1222.866285] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f07b8e-1821-41f9-90e1-984487d8d147 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.943381] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1222.944302] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b574ec3-2cb7-4fa5-80b9-c2965bbe90ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.955768] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.956142] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a6018db-ee04-44c8-b61f-6b7c500fa380 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.976640] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.977409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.977570] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 
tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.977862] env[69927]: DEBUG oslo_concurrency.lockutils [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.978063] env[69927]: DEBUG nova.network.neutron [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1222.981098] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdb0a84-93ec-42d1-b031-bf1260157b65 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.004580] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1223.004580] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1223.004580] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1223.004580] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1223.004580] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1223.004580] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1223.004838] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1223.004838] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1223.005011] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1223.005207] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1223.005355] env[69927]: DEBUG nova.virt.hardware [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1223.012234] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Reconfiguring VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1223.013677] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5502cda3-aa56-4907-a6e7-645fb3625828 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.033103] env[69927]: DEBUG oslo_vmware.api [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1223.033103] env[69927]: value = "task-4096819" [ 1223.033103] env[69927]: _type = "Task" [ 1223.033103] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.046747] env[69927]: DEBUG oslo_vmware.api [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096819, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.051377] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1223.054020] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1223.054020] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleting the datastore file [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1223.054020] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-380d7251-095e-4503-89f0-f8b571303abe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.059484] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1223.059484] env[69927]: value = "task-4096820" [ 1223.059484] env[69927]: _type = "Task" [ 1223.059484] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.070114] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096820, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.155351] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "659e2584-88a8-4382-98c8-f50fcab78e0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.155663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.161562] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 4b7934f8-2c97-480b-8af7-f09f6819e2b6] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1223.324349] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.324502] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.324710] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.324807] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.325086] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.328510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.328510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.329980] env[69927]: INFO nova.compute.claims [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1223.332902] env[69927]: INFO nova.compute.manager [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Terminating instance [ 1223.388150] env[69927]: INFO nova.compute.manager [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Took 15.93 seconds to build instance. [ 1223.544287] env[69927]: DEBUG oslo_vmware.api [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096819, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.569095] env[69927]: DEBUG oslo_vmware.api [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235255} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.569367] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.569554] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.569729] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.594460] env[69927]: INFO nova.scheduler.client.report [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted allocations for instance 693a6c6b-8d1c-405e-bb17-73259e28f556 [ 1223.658811] env[69927]: DEBUG nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1223.668443] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 7554b5e2-dcc3-421f-9fe9-a309c9aa03b7] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1223.689838] env[69927]: DEBUG nova.network.neutron [-] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.838867] env[69927]: DEBUG nova.compute.manager [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1223.839756] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1223.841290] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1b56b7-d4d9-46a5-9000-16f55b60d4c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.852825] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1223.853109] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c2c187d-a4a1-48c4-9ed3-b5d9960335be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.861778] env[69927]: DEBUG oslo_vmware.api [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1223.861778] env[69927]: value = "task-4096821" [ 1223.861778] env[69927]: _type = "Task" [ 1223.861778] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.878472] env[69927]: DEBUG oslo_vmware.api [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096821, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.892810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b4e4535b-bb27-4187-ae02-b28cc7f472f2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.443s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.004917] env[69927]: DEBUG nova.compute.manager [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-vif-plugged-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.005033] env[69927]: DEBUG oslo_concurrency.lockutils [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] Acquiring lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.005285] env[69927]: DEBUG oslo_concurrency.lockutils [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.005489] env[69927]: DEBUG oslo_concurrency.lockutils [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.005749] env[69927]: DEBUG nova.compute.manager [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] No waiting events found dispatching network-vif-plugged-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1224.007212] env[69927]: WARNING nova.compute.manager [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received unexpected event network-vif-plugged-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 for instance with vm_state active and task_state None. [ 1224.007212] env[69927]: DEBUG nova.compute.manager [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-changed-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.008386] env[69927]: DEBUG nova.compute.manager [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing instance network info cache due to event network-changed-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1224.008386] env[69927]: DEBUG oslo_concurrency.lockutils [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.049062] env[69927]: DEBUG oslo_vmware.api [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096819, 'name': ReconfigVM_Task, 'duration_secs': 0.992668} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.049796] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.050082] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Reconfigured VM to attach interface {{(pid=69927) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1224.099161] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.173735] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 7dbbe1bc-961b-498e-aaa1-3d4c8ac88a73] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1224.189872] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.193954] env[69927]: INFO nova.compute.manager [-] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Took 1.51 seconds to deallocate network for instance. [ 1224.325226] env[69927]: DEBUG nova.network.neutron [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updated VIF entry in instance network info cache for port a54251d6-cc17-4c26-95aa-a11a00c08e5e. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1224.325226] env[69927]: DEBUG nova.network.neutron [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "address": "fa:16:3e:32:88:2b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70d5cdb3-06", "ovs_interfaceid": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.375598] env[69927]: DEBUG oslo_vmware.api [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096821, 'name': PowerOffVM_Task, 'duration_secs': 0.358099} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.378429] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1224.378617] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1224.380229] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59fd3033-da44-4bde-9efb-38ba6755fdb0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.453648] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1224.453858] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1224.454809] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleting the datastore file [datastore1] e38222c4-3362-4d47-aee4-d26ccb4cbf3c {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.454809] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1b44599-1f9e-441c-81c6-3b302604e3f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.463140] env[69927]: DEBUG oslo_vmware.api [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1224.463140] env[69927]: value = "task-4096823" [ 1224.463140] env[69927]: _type = "Task" [ 1224.463140] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.477305] env[69927]: DEBUG oslo_vmware.api [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096823, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.554560] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1992886b-3af2-44c3-86e2-616c1b4c30b0 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-5c87c74d-5998-4dfc-bc3c-c2887ff25195-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.482s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.612943] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7365bed-761f-43bf-afa9-600ec8a4f970 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.622201] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d333a9-b5fb-4409-b22c-886b20bbd5be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.630151] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "853d85d4-e98f-4810-a8db-b2a820ebc071" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.630417] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.661305] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84965719-1d79-401b-a97b-5239fb36d384 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.666495] env[69927]: DEBUG nova.compute.manager [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-vif-unplugged-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.667044] env[69927]: DEBUG oslo_concurrency.lockutils [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.667044] env[69927]: DEBUG oslo_concurrency.lockutils [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
[ 1224.667269] env[69927]: DEBUG oslo_concurrency.lockutils [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.667959] env[69927]: DEBUG nova.compute.manager [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] No waiting events found dispatching network-vif-unplugged-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1224.667959] env[69927]: WARNING nova.compute.manager [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received unexpected event network-vif-unplugged-d201dadc-ab89-4ede-8c29-41217e3af341 for instance with vm_state shelved_offloaded and task_state None. [ 1224.667959] env[69927]: DEBUG nova.compute.manager [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.667959] env[69927]: DEBUG nova.compute.manager [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing instance network info cache due to event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1224.668445] env[69927]: DEBUG oslo_concurrency.lockutils [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] Acquiring lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.668445] env[69927]: DEBUG oslo_concurrency.lockutils [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] Acquired lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.668445] env[69927]: DEBUG nova.network.neutron [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1224.675431] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026b1c80-b497-4650-9600-86eb55b923c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.679912] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c87680be-227e-4a3e-92d3-c2310623bfe4] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1224.693578] env[69927]: DEBUG nova.compute.provider_tree [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1224.700727] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.827819] env[69927]: DEBUG oslo_concurrency.lockutils [req-59a8bcdc-59a5-4d00-9607-c0ee3ce1719e req-ea2f26c4-5ce2-4e4e-b2aa-d86fc9415c01 service nova] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.828367] env[69927]: DEBUG oslo_concurrency.lockutils [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.828644] env[69927]: DEBUG nova.network.neutron 
[req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Refreshing network info cache for port 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1224.975518] env[69927]: DEBUG oslo_vmware.api [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364117} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.975518] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1224.975518] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1224.975518] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1224.975518] env[69927]: INFO nova.compute.manager [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1224.975859] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1224.975859] env[69927]: DEBUG nova.compute.manager [-] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1224.975959] env[69927]: DEBUG nova.network.neutron [-] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1225.134936] env[69927]: DEBUG nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1225.183743] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 74ea584f-b20f-425b-acb3-0ec60e7f2a1e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1225.225694] env[69927]: ERROR nova.scheduler.client.report [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [req-bf1a02d5-4272-448b-a737-76620f2acd6a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bf1a02d5-4272-448b-a737-76620f2acd6a"}]} [ 1225.248078] env[69927]: DEBUG nova.scheduler.client.report [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1225.265842] env[69927]: DEBUG nova.scheduler.client.report [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1225.266145] env[69927]: DEBUG nova.compute.provider_tree [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.279740] env[69927]: DEBUG nova.scheduler.client.report [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Refreshing aggregate associations for resource provider 
2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1225.313402] env[69927]: DEBUG nova.scheduler.client.report [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1225.584713] env[69927]: DEBUG nova.network.neutron [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updated VIF entry in instance network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1225.585751] env[69927]: DEBUG nova.network.neutron [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd201dadc-ab", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.605951] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eefe027-3677-469e-bde7-cb1378c4c88a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.617663] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57adbf62-7224-4b81-a2f0-31b872721ae9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.658560] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8893b5d7-50be-41a0-a2a7-b1502cfac95c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.669809] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e663c3-0e9e-4c81-85fc-88307e1af348 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.685466] env[69927]: DEBUG 
nova.compute.provider_tree [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.687771] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.688196] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 01c8eb3b-bf30-4b00-af71-e32f0dc19171] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1225.698281] env[69927]: DEBUG nova.network.neutron [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updated VIF entry in instance network info cache for port 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1225.702120] env[69927]: DEBUG nova.network.neutron [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "address": "fa:16:3e:32:88:2b", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70d5cdb3-06", "ovs_interfaceid": "70d5cdb3-0681-41ab-aa95-e5ae4a5245f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.968860] env[69927]: DEBUG nova.network.neutron [-] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.045750] env[69927]: DEBUG nova.compute.manager [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Received event network-vif-deleted-75ee960c-41d4-4858-8b1e-8198b77eb0d7 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1226.045991] env[69927]: DEBUG nova.compute.manager [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Received event network-vif-deleted-43121e87-0514-4fd2-a304-b280aae31175 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1226.046184] env[69927]: DEBUG nova.compute.manager [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Received event network-changed-e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1226.046345] env[69927]: DEBUG nova.compute.manager [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Refreshing instance network info cache due to event network-changed-e257b275-32fb-40b6-be25-78208eb9442e. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1226.046559] env[69927]: DEBUG oslo_concurrency.lockutils [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] Acquiring lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.046698] env[69927]: DEBUG oslo_concurrency.lockutils [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] Acquired lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.046858] env[69927]: DEBUG nova.network.neutron [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Refreshing network info cache for port e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1226.090077] env[69927]: DEBUG oslo_concurrency.lockutils [req-6078b13e-4c7f-4cbf-a59f-634d2f3d3a47 req-1bc91cc3-7222-43b6-958e-0395a9ad82e0 service nova] Releasing lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.193713] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 67e00c40-35b6-4a9f-9505-19b804e78c04] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1226.201525] env[69927]: DEBUG oslo_concurrency.lockutils [req-1395baea-5ff2-4598-9ae4-fb74561627e6 req-617945da-7e07-46dd-a26b-bbc17392c38d service nova] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.234021] env[69927]: DEBUG nova.scheduler.client.report [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1226.234021] env[69927]: DEBUG nova.compute.provider_tree [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 160 to 161 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1226.234021] env[69927]: DEBUG nova.compute.provider_tree [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 
2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1226.278467] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "interface-5c87c74d-5998-4dfc-bc3c-c2887ff25195-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.278728] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-5c87c74d-5998-4dfc-bc3c-c2887ff25195-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.471304] env[69927]: INFO nova.compute.manager [-] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Took 1.50 seconds to deallocate network for instance. [ 1226.701507] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 5581f8af-9796-48ad-a2f3-557e90d9662a] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1226.735663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.407s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.736195] env[69927]: DEBUG nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1226.739203] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.640s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.739516] env[69927]: DEBUG nova.objects.instance [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'resources' on Instance uuid 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.765780] env[69927]: DEBUG nova.network.neutron [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updated VIF entry in instance network info cache for port e257b275-32fb-40b6-be25-78208eb9442e. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1226.766146] env[69927]: DEBUG nova.network.neutron [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updating instance_info_cache with network_info: [{"id": "e257b275-32fb-40b6-be25-78208eb9442e", "address": "fa:16:3e:de:10:f7", "network": {"id": "095e5f02-dbfd-4d96-bd31-c8c5e870e449", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-240997183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "71a47794e5824701925ad4bdc3651196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape257b275-32", "ovs_interfaceid": "e257b275-32fb-40b6-be25-78208eb9442e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.781313] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.781562] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.782494] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04ad6df-0b36-4df3-b326-2a96c6818edf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.806568] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4432bab3-cd0a-48c2-94bb-ad28342424fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.833655] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Reconfiguring VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1226.833947] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67458dd8-f3ac-4da2-89b8-c8fa66256911 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.853721] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1226.853721] env[69927]: value = "task-4096824" [ 1226.853721] env[69927]: _type = "Task" [ 1226.853721] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.864060] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.980134] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.204659] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 27e20d58-1150-4b90-b888-d84aff1954ef] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1227.242445] env[69927]: DEBUG nova.compute.utils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1227.244016] env[69927]: DEBUG nova.objects.instance [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'numa_topology' on Instance uuid 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.245407] env[69927]: DEBUG nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1227.245407] env[69927]: DEBUG nova.network.neutron [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1227.269499] env[69927]: DEBUG oslo_concurrency.lockutils [req-8da511cc-e8c3-41f0-9fce-423597db4f25 req-a5e26573-ae97-4998-b93e-f979b504dfce service nova] Releasing lock "refresh_cache-afdd23d0-c8e0-4d49-a188-525b6b3f31c8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.284632] env[69927]: DEBUG nova.policy [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5bbec0b58974b25b75ddbf591427809', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2750a80142e4dc1b12a6caf543768ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1227.292108] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556" by 
"nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.364667] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.557332] env[69927]: DEBUG nova.network.neutron [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Successfully created port: 571ec5f9-628f-4a79-8f19-13c41eb94377 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1227.708370] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 44e81156-b0c7-4f68-9732-b39f41ebcd4b] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1227.746816] env[69927]: DEBUG nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1227.751337] env[69927]: DEBUG nova.objects.base [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Object Instance<693a6c6b-8d1c-405e-bb17-73259e28f556> lazy-loaded attributes: resources,numa_topology {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1227.867219] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.954439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1278a40b-4e54-4f88-b806-7cb1bf32d7a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.962023] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be6c89b-388f-4847-b9aa-9f3bb9990031 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.993035] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e83679-0a35-4f6c-88f3-b5f0fd2f8313 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.000421] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf46108-aff4-4ee0-b5d2-1cfa24441ca5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.014540] env[69927]: DEBUG nova.compute.provider_tree [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.212146] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 30d9d1ac-4be0-4723-86b5-0aceda88e67b] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1228.365922] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.518986] env[69927]: DEBUG nova.scheduler.client.report [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.715868] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 358ecaef-37f0-42be-acce-00f389650c97] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1228.756009] env[69927]: DEBUG nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1228.785414] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1228.785660] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.785818] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1228.786021] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1228.786184] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1228.786337] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1228.786864] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1228.786983] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1228.787310] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1228.787478] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1228.787705] env[69927]: DEBUG nova.virt.hardware [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1228.788620] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e606a9e-cd02-4ca2-ac43-6cb4c2639bda {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.797113] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905d3d4d-82e5-4c74-989c-9ad316b2ea61 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.865978] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.940059] env[69927]: DEBUG nova.compute.manager [req-0e90345c-ae1d-41fe-b904-c22c02b4fe1a req-81c5391c-6d54-488f-8723-e51dcd2723fa service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Received event network-vif-plugged-571ec5f9-628f-4a79-8f19-13c41eb94377 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1228.940240] env[69927]: DEBUG oslo_concurrency.lockutils [req-0e90345c-ae1d-41fe-b904-c22c02b4fe1a req-81c5391c-6d54-488f-8723-e51dcd2723fa service nova] Acquiring lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.940457] env[69927]: DEBUG oslo_concurrency.lockutils [req-0e90345c-ae1d-41fe-b904-c22c02b4fe1a req-81c5391c-6d54-488f-8723-e51dcd2723fa service nova] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.940628] env[69927]: DEBUG oslo_concurrency.lockutils [req-0e90345c-ae1d-41fe-b904-c22c02b4fe1a req-81c5391c-6d54-488f-8723-e51dcd2723fa service nova] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.940794] env[69927]: DEBUG nova.compute.manager [req-0e90345c-ae1d-41fe-b904-c22c02b4fe1a req-81c5391c-6d54-488f-8723-e51dcd2723fa service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] No waiting events found dispatching network-vif-plugged-571ec5f9-628f-4a79-8f19-13c41eb94377 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1228.940956] env[69927]: WARNING nova.compute.manager [req-0e90345c-ae1d-41fe-b904-c22c02b4fe1a req-81c5391c-6d54-488f-8723-e51dcd2723fa service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Received unexpected event network-vif-plugged-571ec5f9-628f-4a79-8f19-13c41eb94377 for instance with vm_state building and task_state spawning. 
[ 1229.024969] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.285s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.026567] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.837s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.028444] env[69927]: INFO nova.compute.claims [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1229.032945] env[69927]: DEBUG nova.network.neutron [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Successfully updated port: 571ec5f9-628f-4a79-8f19-13c41eb94377 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.220055] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8be7e64c-7bc6-41a0-ada5-0a5057a2af45] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1229.366620] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.539265] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.539424] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.539574] env[69927]: DEBUG nova.network.neutron [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1229.542028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b9f10511-eb38-4244-b270-8438227f2d72 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 30.057s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.542645] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.251s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.542999] env[69927]: INFO nova.compute.manager [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Unshelving [ 1229.723477] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 50eedb80-d4bc-42c4-9686-6549cbd675b7] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1229.868725] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.082935] env[69927]: DEBUG nova.network.neutron [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1230.223721] env[69927]: DEBUG nova.network.neutron [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Updating instance_info_cache with network_info: [{"id": "571ec5f9-628f-4a79-8f19-13c41eb94377", "address": "fa:16:3e:37:97:cb", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571ec5f9-62", "ovs_interfaceid": "571ec5f9-628f-4a79-8f19-13c41eb94377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.228647] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 9363c664-5848-408b-9b03-2dea4ceded90] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1230.245573] env[69927]: DEBUG oslo_concurrency.lockutils [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.245804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.267763] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a28c8ef-79f7-479c-97c4-13d3e5d0cd31 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.275691] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ee8316-1056-4f1a-9e29-b1618577708c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.305471] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-02cf8c61-bfe6-48ed-a649-c690dd26d84e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.312843] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717fcc3e-e43a-4120-93d8-ecd925bdacea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.326272] env[69927]: DEBUG nova.compute.provider_tree [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.369503] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.567637] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.726186] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.726558] env[69927]: DEBUG nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Instance network_info: |[{"id": "571ec5f9-628f-4a79-8f19-13c41eb94377", "address": "fa:16:3e:37:97:cb", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571ec5f9-62", "ovs_interfaceid": "571ec5f9-628f-4a79-8f19-13c41eb94377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1230.726999] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:97:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd177c5b3-a5b1-4c78-854e-7e0dbf341ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '571ec5f9-628f-4a79-8f19-13c41eb94377', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1230.735091] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Creating folder: Project (b2750a80142e4dc1b12a6caf543768ef). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1230.735528] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: e1946033-4ec3-4561-afdf-a3b748f7c611] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1230.737326] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-231bc35f-4890-4420-a71b-c905e8b587db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.749747] env[69927]: DEBUG nova.compute.utils [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1230.752706] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Created folder: Project (b2750a80142e4dc1b12a6caf543768ef) in parent group-v811283. [ 1230.753156] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Creating folder: Instances. Parent ref: group-v811588. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1230.753428] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eaae91f0-be0f-4efe-9e73-4c0ffcc98de3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.764843] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Created folder: Instances in parent group-v811588. [ 1230.765147] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1230.765361] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1230.765575] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5525073-fa63-4d58-96a4-25367e6b1b47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.786370] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1230.786370] env[69927]: value = "task-4096827" [ 1230.786370] env[69927]: _type = "Task" [ 1230.786370] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.795248] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096827, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.829205] env[69927]: DEBUG nova.scheduler.client.report [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.868775] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.966793] env[69927]: DEBUG nova.compute.manager [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Received event network-changed-571ec5f9-628f-4a79-8f19-13c41eb94377 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1230.966921] env[69927]: DEBUG nova.compute.manager [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Refreshing instance network info cache due to event network-changed-571ec5f9-628f-4a79-8f19-13c41eb94377. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1230.967197] env[69927]: DEBUG oslo_concurrency.lockutils [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] Acquiring lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.967319] env[69927]: DEBUG oslo_concurrency.lockutils [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] Acquired lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.967491] env[69927]: DEBUG nova.network.neutron [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Refreshing network info cache for port 571ec5f9-628f-4a79-8f19-13c41eb94377 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1231.241105] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c3a531fd-647c-43b6-9d3d-fc6ecbc2445e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1231.254882] env[69927]: DEBUG oslo_concurrency.lockutils [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.296424] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096827, 'name': CreateVM_Task, 'duration_secs': 0.326145} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.296634] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1231.297320] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.297517] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.297876] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1231.298407] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8023b2b3-df16-468d-854b-3b23fb29525d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.303415] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1231.303415] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52084686-f9f2-ba2c-3272-5f062a873dd5" [ 1231.303415] env[69927]: _type = "Task" [ 1231.303415] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.311239] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52084686-f9f2-ba2c-3272-5f062a873dd5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.334570] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.308s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.335016] env[69927]: DEBUG nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1231.337510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.638s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.337700] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.339715] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.652s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.341601] env[69927]: INFO nova.compute.claims [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.364759] env[69927]: INFO nova.scheduler.client.report [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Deleted allocations for instance 56aec5c2-d344-4a8d-a55a-930bc425150a [ 1231.376198] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.680936] env[69927]: DEBUG nova.network.neutron [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Updated VIF entry in instance network info cache for port 571ec5f9-628f-4a79-8f19-13c41eb94377. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1231.681333] env[69927]: DEBUG nova.network.neutron [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Updating instance_info_cache with network_info: [{"id": "571ec5f9-628f-4a79-8f19-13c41eb94377", "address": "fa:16:3e:37:97:cb", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571ec5f9-62", "ovs_interfaceid": "571ec5f9-628f-4a79-8f19-13c41eb94377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.744230] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: a9a62523-50fb-44b2-bfc8-9c6664dbf050] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1231.815543] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52084686-f9f2-ba2c-3272-5f062a873dd5, 'name': SearchDatastore_Task, 'duration_secs': 0.010393} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.816057] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.816154] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1231.816414] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.816613] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.816756] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.817099] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fba734ac-633f-4f87-a460-0be02118b958 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.830319] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.830477] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1231.831836] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2da09bc-3cfa-42f7-8050-4be7dba7269e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.840024] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1231.840024] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520ad7ed-d13b-ba0a-b74d-aafe015b7868" [ 1231.840024] env[69927]: _type = "Task" [ 1231.840024] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.848244] env[69927]: DEBUG nova.compute.utils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1231.853311] env[69927]: DEBUG nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1231.853592] env[69927]: DEBUG nova.network.neutron [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1231.855734] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520ad7ed-d13b-ba0a-b74d-aafe015b7868, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.873252] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.873762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-793d5548-5de4-4559-86f6-4a39e66edaad tempest-DeleteServersTestJSON-1690074738 tempest-DeleteServersTestJSON-1690074738-project-member] Lock "56aec5c2-d344-4a8d-a55a-930bc425150a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.989s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.896080] env[69927]: DEBUG nova.policy [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5bbec0b58974b25b75ddbf591427809', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2750a80142e4dc1b12a6caf543768ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1232.146781] env[69927]: DEBUG nova.network.neutron [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Successfully created port: 340dd818-a9e9-44ba-8a3b-a6bf475270e5 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1232.183609] env[69927]: DEBUG oslo_concurrency.lockutils [req-39d55ccd-031b-4a43-9966-1757d20e575d req-8e317e0a-0769-491c-bc52-f414a3fec574 service nova] Releasing lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.247505] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: cb35090d-bfd2-46df-8ee5-d9b068ba0a28] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1232.327324] env[69927]: DEBUG oslo_concurrency.lockutils [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.330460] env[69927]: DEBUG oslo_concurrency.lockutils [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.330460] env[69927]: INFO nova.compute.manager [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Attaching volume f5e28970-f462-4070-87f2-2e1e9b4cab4f to /dev/sdb [ 1232.351607] 
env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520ad7ed-d13b-ba0a-b74d-aafe015b7868, 'name': SearchDatastore_Task, 'duration_secs': 0.025983} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.352409] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd06edb8-e1f8-4f3a-85ed-d5f869c057d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.361089] env[69927]: DEBUG nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1232.363954] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1232.363954] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529f5110-9d2b-19f3-f547-ec9df1103b22" [ 1232.363954] env[69927]: _type = "Task" [ 1232.363954] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.366363] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bb60b0-0d7b-4210-be33-6a5d5ba24556 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.388358] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c1e92d-da3d-4260-9d06-f436a2b93027 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.390898] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.391162] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529f5110-9d2b-19f3-f547-ec9df1103b22, 'name': SearchDatastore_Task, 'duration_secs': 0.009737} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.393788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.394055] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d/80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1232.394815] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78154c05-ec85-4326-8b19-1b10ab834d3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.407432] env[69927]: DEBUG nova.virt.block_device [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating existing volume attachment record: ec64902f-3043-4580-9b41-25fe4061c169 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1232.414453] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1232.414453] env[69927]: value = "task-4096829" [ 1232.414453] env[69927]: _type = "Task" [ 1232.414453] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.425803] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096829, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.590865] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06738b88-4cc3-4e34-939d-8d33592e4b0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.601659] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bf7c36-d931-422b-ad01-bdc0d60a597e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.634894] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bb3df4-2f28-4144-acf8-ae314edbf27c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.643232] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cc9153-3570-4d65-88bc-8375bf344a9c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.658155] env[69927]: DEBUG nova.compute.provider_tree [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.755677] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: a2b1684f-82af-42fc-925e-db36f31cfe63] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1232.879448] env[69927]: DEBUG oslo_vmware.api [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096824, 'name': ReconfigVM_Task, 'duration_secs': 5.935321} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.879984] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.880067] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Reconfigured VM to detach interface {{(pid=69927) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1232.926388] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096829, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.162303] env[69927]: DEBUG nova.scheduler.client.report [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1233.259914] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: c3e8a429-8484-4b11-abe3-1cccf0992556] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1233.374857] env[69927]: DEBUG nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1233.399756] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1233.400012] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1233.400724] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1233.400724] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1233.400724] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1233.400918] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1233.401143] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1233.401319] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1233.401486] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1233.401648] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1233.401817] env[69927]: DEBUG nova.virt.hardware [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1233.402799] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574e45c2-4bb7-4fe4-b352-9d22f06668fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.411627] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2159463-9e90-41dd-a7bf-66bda7be1ee1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.433832] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603147} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.434126] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d/80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1233.434384] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1233.434651] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a97a267-a3ab-4ced-a3e1-c50f62af60b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.442117] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1233.442117] env[69927]: value = "task-4096833" [ 1233.442117] env[69927]: _type = "Task" [ 1233.442117] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.451465] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.553823] env[69927]: DEBUG nova.compute.manager [req-b8576d3a-858c-486a-9429-ab72fa259881 req-2c946d8f-96c0-441e-82dd-7daeb6fdebe9 service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Received event network-vif-plugged-340dd818-a9e9-44ba-8a3b-a6bf475270e5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1233.554086] env[69927]: DEBUG oslo_concurrency.lockutils [req-b8576d3a-858c-486a-9429-ab72fa259881 req-2c946d8f-96c0-441e-82dd-7daeb6fdebe9 service nova] Acquiring lock "659e2584-88a8-4382-98c8-f50fcab78e0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.554346] env[69927]: DEBUG oslo_concurrency.lockutils [req-b8576d3a-858c-486a-9429-ab72fa259881 req-2c946d8f-96c0-441e-82dd-7daeb6fdebe9 service nova] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.554516] env[69927]: DEBUG oslo_concurrency.lockutils [req-b8576d3a-858c-486a-9429-ab72fa259881 req-2c946d8f-96c0-441e-82dd-7daeb6fdebe9 service nova] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.554707] env[69927]: DEBUG nova.compute.manager [req-b8576d3a-858c-486a-9429-ab72fa259881 req-2c946d8f-96c0-441e-82dd-7daeb6fdebe9 service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] No waiting events found dispatching network-vif-plugged-340dd818-a9e9-44ba-8a3b-a6bf475270e5 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1233.554889] env[69927]: WARNING nova.compute.manager [req-b8576d3a-858c-486a-9429-ab72fa259881 req-2c946d8f-96c0-441e-82dd-7daeb6fdebe9 service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Received unexpected event network-vif-plugged-340dd818-a9e9-44ba-8a3b-a6bf475270e5 for instance with vm_state building and task_state spawning. [ 1233.637617] env[69927]: DEBUG nova.network.neutron [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Successfully updated port: 340dd818-a9e9-44ba-8a3b-a6bf475270e5 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.671463] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.671463] env[69927]: DEBUG nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1233.672043] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.692s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.672276] env[69927]: DEBUG nova.objects.instance [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'resources' on Instance uuid e38222c4-3362-4d47-aee4-d26ccb4cbf3c {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.763037] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: e1b3d0bc-a251-4dbd-89a6-216a2f2c1313] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1233.952203] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068421} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.952478] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1233.953275] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2300f386-8c19-4346-9925-aeae8932ff02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.975421] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d/80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1233.975736] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c94512f-2571-4db3-a8c9-a91878f20e39 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.996128] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1233.996128] env[69927]: value = "task-4096834" [ 1233.996128] env[69927]: _type = "Task" [ 1233.996128] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.007207] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096834, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.140388] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "refresh_cache-659e2584-88a8-4382-98c8-f50fcab78e0c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.140487] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "refresh_cache-659e2584-88a8-4382-98c8-f50fcab78e0c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.140658] env[69927]: DEBUG nova.network.neutron [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.175171] env[69927]: DEBUG nova.compute.utils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1234.176584] env[69927]: DEBUG nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1234.176751] env[69927]: DEBUG nova.network.neutron [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1234.210824] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.210965] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquired lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.211116] env[69927]: DEBUG nova.network.neutron [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.223032] env[69927]: DEBUG nova.policy [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5bbec0b58974b25b75ddbf591427809', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2750a80142e4dc1b12a6caf543768ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1234.266857] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: e0bca101-cf8d-48e1-a331-b0018548593e] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1234.421089] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26608e6e-0dac-48a4-9c3f-c596dfaae3cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.429623] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7afe88-56e6-4ae8-a0b3-4bffd356882c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.461404] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7795f1e7-41b2-4be2-9ea6-000d9bc61316 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.470396] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-515e3260-5811-4fa4-9ce7-debe5c115305 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.482727] env[69927]: DEBUG nova.compute.provider_tree [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.507414] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096834, 'name': ReconfigVM_Task, 'duration_secs': 0.280115} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.507691] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d/80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1234.508441] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65f109ce-006b-4e8e-8e2c-77dd02d3b129 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.515749] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1234.515749] env[69927]: value = "task-4096835" [ 1234.515749] env[69927]: _type = "Task" [ 1234.515749] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.526875] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096835, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.527535] env[69927]: DEBUG nova.network.neutron [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Successfully created port: f6b836b5-0070-4fdc-8398-6bd6efe3e550 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1234.680265] env[69927]: DEBUG nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1234.689757] env[69927]: DEBUG nova.network.neutron [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1234.726842] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.727129] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.727346] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.727559] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.727750] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.730266] env[69927]: INFO nova.compute.manager [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Terminating instance [ 1234.770789] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: ff227e07-8e36-48d6-a8c7-1e0087fd1faa] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1234.881769] env[69927]: DEBUG nova.network.neutron [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 
659e2584-88a8-4382-98c8-f50fcab78e0c] Updating instance_info_cache with network_info: [{"id": "340dd818-a9e9-44ba-8a3b-a6bf475270e5", "address": "fa:16:3e:26:dd:24", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340dd818-a9", "ovs_interfaceid": "340dd818-a9e9-44ba-8a3b-a6bf475270e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.941903] env[69927]: INFO nova.network.neutron [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Port 70d5cdb3-0681-41ab-aa95-e5ae4a5245f3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1234.942274] env[69927]: DEBUG nova.network.neutron [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [{"id": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "address": "fa:16:3e:58:62:a1", "network": {"id": "b8b342c3-e0d7-4186-9541-03e865142f8a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-283085538-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ed984d7170742eca7e89bf3bf45e6ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa54251d6-cc", "ovs_interfaceid": "a54251d6-cc17-4c26-95aa-a11a00c08e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.986756] env[69927]: DEBUG nova.scheduler.client.report [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae 
tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1235.026046] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096835, 'name': Rename_Task, 'duration_secs': 0.142477} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.026374] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1235.026710] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbcee897-5969-4859-ac80-75816a9d8143 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.034364] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1235.034364] env[69927]: value = "task-4096836" [ 1235.034364] env[69927]: _type = "Task" [ 1235.034364] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.043919] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096836, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.153705] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.153958] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.234250] env[69927]: DEBUG nova.compute.manager [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1235.234562] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1235.235497] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62544f58-1d8f-498e-8fc6-6465f01d5405 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.244103] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.244396] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c058633-a8ad-48af-a41f-bcdc74b9bad7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.251428] env[69927]: DEBUG oslo_vmware.api [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1235.251428] env[69927]: value = "task-4096838" [ 1235.251428] env[69927]: _type = "Task" [ 1235.251428] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.260645] env[69927]: DEBUG oslo_vmware.api [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096838, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.274547] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 21b7b237-557e-4030-93bb-6b5ce417e53c] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1235.382759] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "refresh_cache-659e2584-88a8-4382-98c8-f50fcab78e0c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.383085] env[69927]: DEBUG nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Instance network_info: |[{"id": "340dd818-a9e9-44ba-8a3b-a6bf475270e5", "address": "fa:16:3e:26:dd:24", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340dd818-a9", "ovs_interfaceid": "340dd818-a9e9-44ba-8a3b-a6bf475270e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1235.383576] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:dd:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd177c5b3-a5b1-4c78-854e-7e0dbf341ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '340dd818-a9e9-44ba-8a3b-a6bf475270e5', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1235.393889] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1235.394061] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1235.394400] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68dfffea-2236-4062-b718-533e7e532e81 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.429451] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1235.429451] env[69927]: value = "task-4096839" [ 1235.429451] env[69927]: _type = "Task" [ 1235.429451] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.439404] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096839, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.445210] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Releasing lock "refresh_cache-5c87c74d-5998-4dfc-bc3c-c2887ff25195" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.492228] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.494980] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.927s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.495255] env[69927]: DEBUG nova.objects.instance [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'pci_requests' on Instance uuid 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1235.530683] env[69927]: INFO nova.scheduler.client.report [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted allocations for instance e38222c4-3362-4d47-aee4-d26ccb4cbf3c [ 1235.545843] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096836, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.658230] env[69927]: DEBUG nova.compute.utils [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1235.677319] env[69927]: DEBUG nova.compute.manager [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Received event network-changed-340dd818-a9e9-44ba-8a3b-a6bf475270e5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1235.677548] env[69927]: DEBUG nova.compute.manager [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Refreshing instance network info cache due to event network-changed-340dd818-a9e9-44ba-8a3b-a6bf475270e5. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1235.677885] env[69927]: DEBUG oslo_concurrency.lockutils [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] Acquiring lock "refresh_cache-659e2584-88a8-4382-98c8-f50fcab78e0c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.677885] env[69927]: DEBUG oslo_concurrency.lockutils [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] Acquired lock "refresh_cache-659e2584-88a8-4382-98c8-f50fcab78e0c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.678018] env[69927]: DEBUG nova.network.neutron [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Refreshing network info cache for port 340dd818-a9e9-44ba-8a3b-a6bf475270e5 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1235.690836] env[69927]: DEBUG nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1235.721416] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1235.721677] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1235.721832] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1235.722017] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1235.722167] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1235.722318] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1235.722528] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1235.722708] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1235.722890] 
env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1235.723124] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1235.723403] env[69927]: DEBUG nova.virt.hardware [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1235.724619] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a5400a-d501-42d8-a7de-dd497240eb9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.733956] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749403e6-de2d-44d6-800f-9eeb041e8fd3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.761989] env[69927]: DEBUG oslo_vmware.api [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096838, 'name': PowerOffVM_Task, 'duration_secs': 0.180645} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.762291] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.762465] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1235.762726] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6453967-5fad-4d83-a259-bfd130c066e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.778161] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 256319c4-817d-4267-8531-a65f0f8cd0b6] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1235.840810] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1235.841132] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1235.841411] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleting the datastore file [datastore1] 5c87c74d-5998-4dfc-bc3c-c2887ff25195 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1235.842271] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3af383f-958b-4c30-992c-3997da8eef41 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.849997] env[69927]: DEBUG oslo_vmware.api [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1235.849997] env[69927]: value = "task-4096841" [ 1235.849997] env[69927]: _type = "Task" [ 1235.849997] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.861186] env[69927]: DEBUG oslo_vmware.api [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096841, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.940309] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096839, 'name': CreateVM_Task, 'duration_secs': 0.460416} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.940497] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1235.941206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.941376] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.941724] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1235.942280] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ec9653b-f540-4491-ba0d-96b0c6a7b2f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.948053] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1235.948053] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5230a129-3d31-fc42-3bfa-aaa3afcac52a" [ 1235.948053] env[69927]: _type = "Task" [ 1235.948053] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.948947] env[69927]: DEBUG oslo_concurrency.lockutils [None req-002dcfdb-0466-40c2-a8ef-18205da8538d tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "interface-5c87c74d-5998-4dfc-bc3c-c2887ff25195-70d5cdb3-0681-41ab-aa95-e5ae4a5245f3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.670s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.959400] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5230a129-3d31-fc42-3bfa-aaa3afcac52a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.000871] env[69927]: DEBUG nova.objects.instance [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'numa_topology' on Instance uuid 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.048364] env[69927]: DEBUG oslo_vmware.api [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096836, 'name': PowerOnVM_Task, 'duration_secs': 0.588161} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.048925] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8d439ae7-3c0f-4d22-bda6-fc748c3b94ae tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "e38222c4-3362-4d47-aee4-d26ccb4cbf3c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.724s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.050352] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1236.050667] env[69927]: INFO nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Took 7.29 seconds to spawn the instance on the hypervisor. 
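The spawn path for instance 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d visible above runs a chain of vCenter tasks (CopyVirtualDisk, ExtendVirtualDisk, ReconfigVM to attach the VMDK, Rename, PowerOnVM), each polled until completion. The 'duration_secs' values logged for those tasks let you sanity-check how much of the reported 7.29 s spawn time was actually spent inside vCenter tasks. The sketch below only does that arithmetic over the numbers quoted in this excerpt; it is an illustration, not part of Nova, and the task list is not exhaustive (the earlier CreateVM_Task for this instance falls outside the excerpt).

```python
# Rough accounting of the vCenter task time visible in this log excerpt
# for instance 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d. The values are the
# 'duration_secs' fields reported above; other work (image cache checks,
# Neutron port plumbing, PropertyCollector round trips, DB updates) is
# not covered by these tasks.
task_durations = {
    "CopyVirtualDisk_Task": 0.603147,
    "ExtendVirtualDisk_Task": 0.068421,
    "ReconfigVM_Task": 0.280115,
    "Rename_Task": 0.142477,
    "PowerOnVM_Task": 0.588161,
}

reported_spawn_seconds = 7.29  # "Took 7.29 seconds to spawn the instance"

in_task = sum(task_durations.values())
print(f"vCenter task time shown here: {in_task:.2f}s")
print(f"reported spawn time:          {reported_spawn_seconds:.2f}s")
print(f"spent elsewhere in the spawn: ~{reported_spawn_seconds - in_task:.2f}s")
```

Run against the figures above this gives roughly 1.7 s inside the listed vCenter tasks, i.e. most of the 7.29 s spawn (and the 13.27 s end-to-end build reported below) is spent outside them.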
[ 1236.050911] env[69927]: DEBUG nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1236.052253] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6350d04c-e361-41ac-8009-0dec42586984 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.161942] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.262264] env[69927]: DEBUG nova.network.neutron [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Successfully updated port: f6b836b5-0070-4fdc-8398-6bd6efe3e550 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1236.281680] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 95c02aa2-d587-4c9f-9b02-2992dfe5b1be] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1236.365244] env[69927]: DEBUG oslo_vmware.api [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160651} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.368908] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.369304] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.369636] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.369940] env[69927]: INFO nova.compute.manager [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Took 1.14 seconds to destroy the instance on the hypervisor. 
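The teardown of 5c87c74d-5998-4dfc-bc3c-c2887ff25195 above follows the same shape as the spawn: each vCenter operation (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is submitted and then polled until it reports success, and only then is the network deallocated. As a rough illustration of the poll-until-done pattern behind the repeated "Task: {...} progress is N%" lines, here is a minimal sketch; it is not oslo.vmware's actual wait_for_task, and `client.get_task_info` is a hypothetical stand-in for whatever returns a task's state and progress from vCenter.

```python
# Minimal, hypothetical sketch of the task-polling pattern reflected in
# the "progress is N%" log lines above. Not oslo.vmware code.
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(client, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds, raising on error."""
    while True:
        info = client.get_task_info(task_id)  # hypothetical call
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise TaskFailed(f"{task_id}: {info.error}")
        # Still queued or running: report progress and poll again.
        print(f"Task {task_id} progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```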
[ 1236.370306] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1236.370708] env[69927]: DEBUG nova.compute.manager [-] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1236.370999] env[69927]: DEBUG nova.network.neutron [-] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1236.466064] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5230a129-3d31-fc42-3bfa-aaa3afcac52a, 'name': SearchDatastore_Task, 'duration_secs': 0.01126} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.466064] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.466064] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.466064] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.466064] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.466064] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.466064] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-54e2e753-966a-46b6-881d-779570c03f20 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.468216] env[69927]: DEBUG nova.network.neutron [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Updated VIF entry in instance network info cache for port 340dd818-a9e9-44ba-8a3b-a6bf475270e5. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1236.468549] env[69927]: DEBUG nova.network.neutron [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Updating instance_info_cache with network_info: [{"id": "340dd818-a9e9-44ba-8a3b-a6bf475270e5", "address": "fa:16:3e:26:dd:24", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340dd818-a9", "ovs_interfaceid": "340dd818-a9e9-44ba-8a3b-a6bf475270e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.491306] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.491544] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1236.492834] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05d60dfe-c4e8-4f07-bf67-642004cd9366 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.499483] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1236.499483] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6f81d-ee41-436e-8eb7-100902c37744" [ 1236.499483] env[69927]: _type = "Task" [ 1236.499483] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.502953] env[69927]: INFO nova.compute.claims [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.512279] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6f81d-ee41-436e-8eb7-100902c37744, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.573145] env[69927]: INFO nova.compute.manager [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Took 13.27 seconds to build instance. [ 1236.753237] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "07814f60-1886-4b06-bcf7-e2c9b95a4501" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.753237] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.753461] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "07814f60-1886-4b06-bcf7-e2c9b95a4501-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.753563] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.753736] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.759385] env[69927]: INFO nova.compute.manager [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 
tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Terminating instance [ 1236.762857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "refresh_cache-853d85d4-e98f-4810-a8db-b2a820ebc071" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.762938] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "refresh_cache-853d85d4-e98f-4810-a8db-b2a820ebc071" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.763032] env[69927]: DEBUG nova.network.neutron [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1236.786094] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 77c6ce9e-5e15-41e4-aa81-1ef01248aa32] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1236.972068] env[69927]: DEBUG oslo_concurrency.lockutils [req-d9cb4adf-3949-4f41-8540-68adce842f2d req-ae92c18c-d317-4a50-8c1d-f6d38c034fef service nova] Releasing lock "refresh_cache-659e2584-88a8-4382-98c8-f50fcab78e0c" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.024290] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e6f81d-ee41-436e-8eb7-100902c37744, 'name': SearchDatastore_Task, 'duration_secs': 0.033588} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.026263] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7b102f5-7959-4b87-a687-15c72422d77a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.033392] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1237.033392] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ae6679-751d-4edf-025f-a1a70a1cf393" [ 1237.033392] env[69927]: _type = "Task" [ 1237.033392] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.045734] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ae6679-751d-4edf-025f-a1a70a1cf393, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.075776] env[69927]: DEBUG oslo_concurrency.lockutils [None req-159d7622-6fbc-4d38-a8c3-7aab68d38602 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.781s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.245619] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.245855] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.246102] env[69927]: INFO nova.compute.manager [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Attaching volume 2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257 to /dev/sdb [ 1237.268832] env[69927]: DEBUG nova.compute.manager [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1237.269073] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1237.269991] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff165642-703d-478c-9cf0-f4bbd9731d30 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.277842] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bec932a-b80c-4e66-9f38-801154cca7ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.283324] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1237.284018] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6352ac6-8297-4c6b-8333-fd824d120509 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.288915] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 9348e368-cc3c-4bde-91ae-26fd03ad536a] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1237.295053] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c438710b-4892-43db-b941-53402268a687 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.296220] env[69927]: DEBUG oslo_vmware.api [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1237.296220] env[69927]: value = "task-4096842" [ 1237.296220] env[69927]: _type = "Task" [ 1237.296220] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.309702] env[69927]: DEBUG oslo_vmware.api [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.315344] env[69927]: DEBUG nova.network.neutron [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1237.317740] env[69927]: DEBUG nova.virt.block_device [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating existing volume attachment record: febe322e-1f76-4c67-a4bb-eb5482eba28e {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1237.472785] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1237.473109] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811592', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'name': 'volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8b70b479-4a54-4bcb-813d-16cc0c9a67c5', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'serial': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1237.474604] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683b5f21-b91c-4a29-bac4-d073dbe492e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.496744] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac478df-e886-4477-be1f-3a5d7c18b931 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.528206] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f/volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1237.528873] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d728ac1a-8de1-4992-8298-c9a18db8bf7c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.545871] env[69927]: DEBUG nova.network.neutron [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Updating instance_info_cache with network_info: [{"id": "f6b836b5-0070-4fdc-8398-6bd6efe3e550", "address": "fa:16:3e:84:eb:cb", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", 
"bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b836b5-00", "ovs_interfaceid": "f6b836b5-0070-4fdc-8398-6bd6efe3e550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.573450] env[69927]: DEBUG oslo_vmware.api [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1237.573450] env[69927]: value = "task-4096845" [ 1237.573450] env[69927]: _type = "Task" [ 1237.573450] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.573592] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ae6679-751d-4edf-025f-a1a70a1cf393, 'name': SearchDatastore_Task, 'duration_secs': 0.011951} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.574063] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.574408] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 659e2584-88a8-4382-98c8-f50fcab78e0c/659e2584-88a8-4382-98c8-f50fcab78e0c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1237.579061] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2cdb83c-7698-4f80-b2b9-7822cc3834c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.594995] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1237.594995] env[69927]: value = "task-4096846" [ 1237.594995] env[69927]: _type = "Task" [ 1237.594995] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.602060] env[69927]: DEBUG oslo_vmware.api [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096845, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.612942] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096846, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.735760] env[69927]: DEBUG nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Received event network-vif-plugged-f6b836b5-0070-4fdc-8398-6bd6efe3e550 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1237.739092] env[69927]: DEBUG oslo_concurrency.lockutils [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] Acquiring lock "853d85d4-e98f-4810-a8db-b2a820ebc071-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.739845] env[69927]: DEBUG oslo_concurrency.lockutils [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.739845] env[69927]: DEBUG oslo_concurrency.lockutils [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.739845] env[69927]: DEBUG nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] No waiting events found dispatching network-vif-plugged-f6b836b5-0070-4fdc-8398-6bd6efe3e550 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1237.744020] env[69927]: WARNING nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Received unexpected event network-vif-plugged-f6b836b5-0070-4fdc-8398-6bd6efe3e550 for instance with vm_state building and task_state spawning. [ 1237.744020] env[69927]: DEBUG nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Received event network-changed-f6b836b5-0070-4fdc-8398-6bd6efe3e550 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1237.744020] env[69927]: DEBUG nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Refreshing instance network info cache due to event network-changed-f6b836b5-0070-4fdc-8398-6bd6efe3e550. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1237.744020] env[69927]: DEBUG oslo_concurrency.lockutils [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] Acquiring lock "refresh_cache-853d85d4-e98f-4810-a8db-b2a820ebc071" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.798698] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.798698] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Cleaning up deleted instances with incomplete migration {{(pid=69927) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1237.817783] env[69927]: DEBUG oslo_vmware.api [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096842, 'name': PowerOffVM_Task, 'duration_secs': 0.321369} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.820283] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1237.820283] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1237.820283] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a39d2a36-8d3e-4003-8b87-de111135fedd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.824864] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5fa235-5588-4d65-8b33-386d9f0a323b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.836292] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af710f3-7908-45f6-86ea-408198b6bfc6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.873034] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ee6bc8-1b66-49be-8d65-f9eea964034d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.885024] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e2731c-4c6f-4340-a47e-6da3973bc579 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.903995] env[69927]: DEBUG nova.compute.provider_tree [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 
tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.917345] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1237.917729] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1237.917833] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleting the datastore file [datastore2] 07814f60-1886-4b06-bcf7-e2c9b95a4501 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1237.918322] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b669ea10-edfa-471e-86ba-f48ce79817b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.931749] env[69927]: DEBUG oslo_vmware.api [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1237.931749] env[69927]: value = "task-4096849" [ 1237.931749] env[69927]: _type = "Task" [ 1237.931749] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.944185] env[69927]: DEBUG oslo_vmware.api [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096849, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.963376] env[69927]: DEBUG nova.network.neutron [-] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.053373] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "refresh_cache-853d85d4-e98f-4810-a8db-b2a820ebc071" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.053782] env[69927]: DEBUG nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Instance network_info: |[{"id": "f6b836b5-0070-4fdc-8398-6bd6efe3e550", "address": "fa:16:3e:84:eb:cb", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b836b5-00", "ovs_interfaceid": "f6b836b5-0070-4fdc-8398-6bd6efe3e550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1238.054473] env[69927]: DEBUG oslo_concurrency.lockutils [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] Acquired lock "refresh_cache-853d85d4-e98f-4810-a8db-b2a820ebc071" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.054473] env[69927]: DEBUG nova.network.neutron [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Refreshing network info cache for port f6b836b5-0070-4fdc-8398-6bd6efe3e550 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1238.055735] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:eb:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd177c5b3-a5b1-4c78-854e-7e0dbf341ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6b836b5-0070-4fdc-8398-6bd6efe3e550', 'vif_model': 'vmxnet3'}] 
{{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1238.065862] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.069368] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1238.070098] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa1c8b4a-1c65-4de5-a7f1-eb02dbe823aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.097642] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1238.097642] env[69927]: value = "task-4096850" [ 1238.097642] env[69927]: _type = "Task" [ 1238.097642] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.102260] env[69927]: DEBUG oslo_vmware.api [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096845, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.116230] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096850, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.120301] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524617} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.123547] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 659e2584-88a8-4382-98c8-f50fcab78e0c/659e2584-88a8-4382-98c8-f50fcab78e0c.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1238.124026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1238.124109] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-407ba407-1947-4888-925e-023396025198 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.132933] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1238.132933] env[69927]: value = "task-4096851" [ 1238.132933] env[69927]: _type = "Task" [ 1238.132933] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.142742] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096851, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.328310] env[69927]: DEBUG nova.network.neutron [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Updated VIF entry in instance network info cache for port f6b836b5-0070-4fdc-8398-6bd6efe3e550. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1238.328747] env[69927]: DEBUG nova.network.neutron [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Updating instance_info_cache with network_info: [{"id": "f6b836b5-0070-4fdc-8398-6bd6efe3e550", "address": "fa:16:3e:84:eb:cb", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6b836b5-00", "ovs_interfaceid": "f6b836b5-0070-4fdc-8398-6bd6efe3e550", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.407962] env[69927]: DEBUG nova.scheduler.client.report [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.443557] env[69927]: DEBUG oslo_vmware.api [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295442} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.443867] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.444114] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1238.444333] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1238.444582] env[69927]: INFO nova.compute.manager [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1238.444861] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.445068] env[69927]: DEBUG nova.compute.manager [-] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1238.445181] env[69927]: DEBUG nova.network.neutron [-] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1238.467447] env[69927]: INFO nova.compute.manager [-] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Took 2.10 seconds to deallocate network for instance. [ 1238.598525] env[69927]: DEBUG oslo_vmware.api [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096845, 'name': ReconfigVM_Task, 'duration_secs': 0.55418} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.598968] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f/volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.604075] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74bfbeba-bf83-46a6-9811-08baa3f4bd87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.624757] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096850, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.630039] env[69927]: DEBUG oslo_vmware.api [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1238.630039] env[69927]: value = "task-4096852" [ 1238.630039] env[69927]: _type = "Task" [ 1238.630039] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.635801] env[69927]: DEBUG oslo_vmware.api [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096852, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.646396] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096851, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079327} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.646707] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1238.649709] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6660c1-3043-492c-9b9e-0153c2a1df0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.676125] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 659e2584-88a8-4382-98c8-f50fcab78e0c/659e2584-88a8-4382-98c8-f50fcab78e0c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1238.676125] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d218ca3e-e49b-4135-ad90-b6da98d1f6a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.698191] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1238.698191] env[69927]: value = "task-4096853" [ 1238.698191] env[69927]: _type = "Task" [ 1238.698191] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.712268] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096853, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.831934] env[69927]: DEBUG oslo_concurrency.lockutils [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] Releasing lock "refresh_cache-853d85d4-e98f-4810-a8db-b2a820ebc071" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.832251] env[69927]: DEBUG nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Received event network-vif-deleted-a54251d6-cc17-4c26-95aa-a11a00c08e5e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1238.832434] env[69927]: INFO nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Neutron deleted interface a54251d6-cc17-4c26-95aa-a11a00c08e5e; detaching it from the instance and deleting it from the info cache [ 1238.832605] env[69927]: DEBUG nova.network.neutron [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.915017] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.420s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.946783] env[69927]: INFO nova.network.neutron [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating port d201dadc-ab89-4ede-8c29-41217e3af341 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1238.975302] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.975751] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.976013] env[69927]: DEBUG nova.objects.instance [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'resources' on Instance uuid 5c87c74d-5998-4dfc-bc3c-c2887ff25195 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.118236] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-4096850, 'name': CreateVM_Task, 'duration_secs': 0.550781} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.118439] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1239.121403] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.121403] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.121403] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1239.121403] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a44befe-f928-405e-94e7-fc770bf65088 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.129767] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1239.129767] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a86cc-e397-ff86-17d4-08e33e0eb63f" [ 1239.129767] env[69927]: _type = "Task" [ 1239.129767] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.144793] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a86cc-e397-ff86-17d4-08e33e0eb63f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.149761] env[69927]: DEBUG oslo_vmware.api [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096852, 'name': ReconfigVM_Task, 'duration_secs': 0.159945} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.150336] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811592', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'name': 'volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8b70b479-4a54-4bcb-813d-16cc0c9a67c5', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'serial': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1239.215640] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096853, 'name': ReconfigVM_Task, 'duration_secs': 0.328941} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.215640] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 659e2584-88a8-4382-98c8-f50fcab78e0c/659e2584-88a8-4382-98c8-f50fcab78e0c.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1239.215640] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd30d33f-bac6-42db-ad32-8300be2a3061 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.223706] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1239.223706] env[69927]: value = "task-4096854" [ 1239.223706] env[69927]: _type = "Task" [ 1239.223706] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.239675] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096854, 'name': Rename_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.299047] env[69927]: DEBUG nova.compute.manager [req-367818a3-2579-4dd1-bb78-1751c07f8318 req-a1d55a81-4ef7-4bfa-9fee-8f38f958341b service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Received event network-vif-deleted-d355060d-92db-48c9-ac0c-a82f6c92c904 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.299569] env[69927]: INFO nova.compute.manager [req-367818a3-2579-4dd1-bb78-1751c07f8318 req-a1d55a81-4ef7-4bfa-9fee-8f38f958341b service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Neutron deleted interface d355060d-92db-48c9-ac0c-a82f6c92c904; detaching it from the instance and deleting it from the info cache [ 1239.299757] env[69927]: DEBUG nova.network.neutron [req-367818a3-2579-4dd1-bb78-1751c07f8318 req-a1d55a81-4ef7-4bfa-9fee-8f38f958341b service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.336318] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-294c5cc5-8a08-4fef-b829-670bd4c660db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.346701] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85f077b-671c-4dd9-b3e3-de8b8649e40b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.384664] env[69927]: DEBUG nova.compute.manager [req-9835d41d-e927-4bbf-8995-23a8391f20be req-e5026721-3d5b-44bf-9b37-da09b8293318 service nova] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Detach interface failed, port_id=a54251d6-cc17-4c26-95aa-a11a00c08e5e, reason: Instance 5c87c74d-5998-4dfc-bc3c-c2887ff25195 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1239.635716] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a86cc-e397-ff86-17d4-08e33e0eb63f, 'name': SearchDatastore_Task, 'duration_secs': 0.041337} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.638521] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.639828] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1239.639828] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.639828] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.639828] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1239.640051] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14ed12b2-d727-4122-b2c7-b507d0800458 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.651793] env[69927]: DEBUG nova.network.neutron [-] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.655895] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.656084] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1239.659996] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da7d0111-0351-4349-a9cc-c4cb7202e86d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.663297] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1239.663297] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f99eac-68db-49f9-73b2-66ed9d859f09" [ 1239.663297] env[69927]: _type = "Task" [ 1239.663297] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.672475] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f99eac-68db-49f9-73b2-66ed9d859f09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.686710] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adc8542-6bcb-4704-9cb0-ecc8609e096a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.695093] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cf9205-8435-44b2-8051-04eaab888ffe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.729776] env[69927]: DEBUG nova.objects.instance [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'flavor' on Instance uuid 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.734803] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17459dda-2e56-4d2a-8c78-bc2fb459b89e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.744866] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096854, 'name': Rename_Task, 'duration_secs': 0.349606} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.747426] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1239.747763] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cb195bb-1b67-4dcd-bc1e-292c4919f353 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.750426] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e6ba0b-ae71-47af-889b-421ed2330369 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.767162] env[69927]: DEBUG nova.compute.provider_tree [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.769832] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1239.769832] env[69927]: value = "task-4096856" [ 1239.769832] env[69927]: _type = "Task" [ 1239.769832] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.778878] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096856, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.804332] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92ca05e9-fdca-450f-94cf-340059de57fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.815032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cc63cf-591c-4892-ac35-15d8104e2d31 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.852230] env[69927]: DEBUG nova.compute.manager [req-367818a3-2579-4dd1-bb78-1751c07f8318 req-a1d55a81-4ef7-4bfa-9fee-8f38f958341b service nova] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Detach interface failed, port_id=d355060d-92db-48c9-ac0c-a82f6c92c904, reason: Instance 07814f60-1886-4b06-bcf7-e2c9b95a4501 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1240.158135] env[69927]: INFO nova.compute.manager [-] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Took 1.71 seconds to deallocate network for instance. 
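(Aside, not part of the log: the surrounding entries all trace the same oslo.vmware task lifecycle — "Invoking <Something>_Task", "Waiting for the task", repeated "progress is N%" polls, then "completed successfully" — driven under oslo.concurrency locks like the "compute_resources" acquire/release pairs above. A minimal sketch of that pattern is below, assuming a reachable vCenter with placeholder hostname/credentials and an already-resolved `vm_ref` managed-object reference; it is illustrative only, not the Nova driver code that produced these lines.)

```python
# Illustrative sketch only -- mirrors the task lifecycle visible in this log,
# not the actual nova.virt.vmwareapi code. Host, credentials and the lock
# name are placeholders; obtaining `vm_ref` is elided and assumed done.
from oslo_concurrency import lockutils
from oslo_vmware import api


def power_on(vm_ref):
    # Creating the session produces the "Logging into host" /
    # "Successfully established new session" entries seen at startup.
    session = api.VMwareAPISession(
        'vc.example.test',                  # placeholder vCenter host
        'administrator@vsphere.local',      # placeholder user
        'secret',                           # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)             # cadence behind "progress is N%"

    # Serialize with other workers, analogous to the log's
    # 'Lock "compute_resources" acquired ... released' pairs.
    with lockutils.lock('example-power-on-lock'):
        # Emits an "Invoking VirtualMachine.PowerOnVM_Task ..." entry.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Emits the "Waiting for the task" / "progress is N%" /
        # "completed successfully" sequence and raises if the task fails.
        return session.wait_for_task(task)
```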
[ 1240.176651] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f99eac-68db-49f9-73b2-66ed9d859f09, 'name': SearchDatastore_Task, 'duration_secs': 0.010582} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.177849] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f4b47d2-83ff-42a5-b7d5-3e0d01bc92a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.186235] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1240.186235] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524a2d18-8528-6204-25cd-3c911f68b4ce" [ 1240.186235] env[69927]: _type = "Task" [ 1240.186235] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.199291] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524a2d18-8528-6204-25cd-3c911f68b4ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.244848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-818efc27-88c9-408f-b844-0a122c1793a5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.917s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.272564] env[69927]: DEBUG nova.scheduler.client.report [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1240.292295] env[69927]: DEBUG oslo_vmware.api [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096856, 'name': PowerOnVM_Task, 'duration_secs': 0.519001} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.292295] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1240.292295] env[69927]: INFO nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Took 6.92 seconds to spawn the instance on the hypervisor. [ 1240.293034] env[69927]: DEBUG nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1240.293630] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e822091-a490-469d-add4-3cd24132f37a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.600393] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.600748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.600748] env[69927]: DEBUG nova.network.neutron [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1240.661454] env[69927]: DEBUG nova.compute.manager [req-f5e6ed9f-5cdf-4df2-9c5f-eb57803ca880 req-c85323ba-8b42-4c6e-88fa-d5455ed8044d service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-vif-plugged-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1240.661627] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5e6ed9f-5cdf-4df2-9c5f-eb57803ca880 req-c85323ba-8b42-4c6e-88fa-d5455ed8044d service nova] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.662234] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5e6ed9f-5cdf-4df2-9c5f-eb57803ca880 req-c85323ba-8b42-4c6e-88fa-d5455ed8044d service nova] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.662234] env[69927]: DEBUG oslo_concurrency.lockutils [req-f5e6ed9f-5cdf-4df2-9c5f-eb57803ca880 req-c85323ba-8b42-4c6e-88fa-d5455ed8044d service nova] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.662715] env[69927]: DEBUG nova.compute.manager [req-f5e6ed9f-5cdf-4df2-9c5f-eb57803ca880 req-c85323ba-8b42-4c6e-88fa-d5455ed8044d service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] No waiting events found dispatching network-vif-plugged-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1240.662906] env[69927]: WARNING nova.compute.manager [req-f5e6ed9f-5cdf-4df2-9c5f-eb57803ca880 req-c85323ba-8b42-4c6e-88fa-d5455ed8044d service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received unexpected event network-vif-plugged-d201dadc-ab89-4ede-8c29-41217e3af341 for instance with vm_state shelved_offloaded and task_state spawning. [ 1240.669728] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.698014] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524a2d18-8528-6204-25cd-3c911f68b4ce, 'name': SearchDatastore_Task, 'duration_secs': 0.011365} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.698473] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.698813] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 853d85d4-e98f-4810-a8db-b2a820ebc071/853d85d4-e98f-4810-a8db-b2a820ebc071.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1240.699111] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05bce51f-f7c2-4440-85d1-e81a1cad8778 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.707574] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1240.707574] env[69927]: value = "task-4096857" [ 1240.707574] env[69927]: _type = "Task" [ 1240.707574] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.718758] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096857, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.785470] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.788164] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.118s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.788419] env[69927]: DEBUG nova.objects.instance [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'resources' on Instance uuid 07814f60-1886-4b06-bcf7-e2c9b95a4501 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1240.802717] env[69927]: INFO nova.scheduler.client.report [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted allocations for instance 5c87c74d-5998-4dfc-bc3c-c2887ff25195 [ 1240.824948] env[69927]: INFO nova.compute.manager [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Took 16.66 seconds to build instance. [ 1241.219691] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478595} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.220247] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 853d85d4-e98f-4810-a8db-b2a820ebc071/853d85d4-e98f-4810-a8db-b2a820ebc071.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1241.220247] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1241.222757] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80e577f8-8f17-463c-a6e6-bfb7fb632fc3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.231618] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1241.231618] env[69927]: value = "task-4096858" [ 1241.231618] env[69927]: _type = "Task" [ 1241.231618] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.244168] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096858, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.311910] env[69927]: DEBUG oslo_concurrency.lockutils [None req-632b1dae-ef5f-4ede-8165-5a6bc83ca01f tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "5c87c74d-5998-4dfc-bc3c-c2887ff25195" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.584s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.327901] env[69927]: DEBUG oslo_concurrency.lockutils [None req-08e5d664-778e-4a75-87fa-1ce833164e8e tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.172s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.366686] env[69927]: DEBUG nova.network.neutron [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd201dadc-ab", "ovs_interfaceid": "d201dadc-ab89-4ede-8c29-41217e3af341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.530020] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d34671-f6b2-4b3c-ab2c-b86c0d6c539d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.537109] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee9be24-b256-4728-bfd2-754f41ecd162 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.574098] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635e6300-18aa-487b-8462-16ea9ffa5a9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.582877] 
env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcb1447-9d7d-4f6f-9bf6-354c877b57e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.599502] env[69927]: DEBUG nova.compute.provider_tree [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.691886] env[69927]: DEBUG nova.compute.manager [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1241.746156] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07227} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.746156] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1241.746156] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dacff5-fbf2-47aa-8187-79abc2cb778b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.773021] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 853d85d4-e98f-4810-a8db-b2a820ebc071/853d85d4-e98f-4810-a8db-b2a820ebc071.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1241.773021] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d456b69-cca9-4071-bb56-1c42125a66d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.793068] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1241.793068] env[69927]: value = "task-4096859" [ 1241.793068] env[69927]: _type = "Task" [ 1241.793068] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.803635] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096859, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.869907] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1241.882378] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1241.882617] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811595', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'name': 'volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b007a697-7da4-4c97-9ccb-046d86b27568', 'attached_at': '', 'detached_at': '', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'serial': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1241.883535] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce0a867-c386-49a3-ac25-b5130f115122 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.901966] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90bca76-84fa-4d42-b267-d41d56aab27f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.928050] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257/volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1241.930709] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='3ddc7f9ce29539f49cce586400b81daf',container_format='bare',created_at=2025-05-13T19:45:52Z,direct_url=,disk_format='vmdk',id=87cd0321-c9d5-427e-8af6-c3bd78649765,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2045205320-shelved',owner='3b7ae5270b0643e6b5720d4f2f765d74',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-05-13T19:46:11Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1241.930993] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.931206] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1241.931437] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.931657] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1241.931822] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1241.932038] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1241.932202] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1241.932371] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1241.932527] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1241.932692] env[69927]: DEBUG nova.virt.hardware [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1241.932975] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4a07cc3-cc66-4792-b417-bd83e06d419e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.946391] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54f882a-c1a5-4b66-bf44-37bc032b3244 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.955978] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d0ecdb-daa0-43c0-a891-7b7854cf03a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.960276] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.960618] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.960727] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.960874] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.961039] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 
tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.962695] env[69927]: DEBUG oslo_vmware.api [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1241.962695] env[69927]: value = "task-4096860" [ 1241.962695] env[69927]: _type = "Task" [ 1241.962695] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.963153] env[69927]: INFO nova.compute.manager [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Terminating instance [ 1241.977578] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:c9:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '510d3c47-3615-43d5-aa5d-a279fd915e71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd201dadc-ab89-4ede-8c29-41217e3af341', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1241.985220] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1241.986756] env[69927]: DEBUG nova.compute.manager [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1241.986964] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1241.987742] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1241.988836] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa7c517-f92d-4c30-a608-d013fdc35cae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.994978] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-920aa400-5672-4292-a140-fb056c5c1774 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.009239] env[69927]: DEBUG oslo_vmware.api [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096860, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.014542] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1242.015765] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad367e98-34f1-422c-a8b5-c5f73941a6af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.017224] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1242.017224] env[69927]: value = "task-4096861" [ 1242.017224] env[69927]: _type = "Task" [ 1242.017224] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.022186] env[69927]: DEBUG oslo_vmware.api [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ [ 1242.022186] env[69927]: value = "task-4096862" [ 1242.022186] env[69927]: _type = "Task" [ 1242.022186] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.028312] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096861, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.033714] env[69927]: DEBUG oslo_vmware.api [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096862, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.106575] env[69927]: DEBUG nova.scheduler.client.report [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1242.219079] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.230080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.230439] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.304714] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096859, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.475711] env[69927]: DEBUG oslo_vmware.api [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096860, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.529721] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096861, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.534992] env[69927]: DEBUG oslo_vmware.api [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096862, 'name': PowerOffVM_Task, 'duration_secs': 0.282326} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.535144] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1242.535301] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1242.535598] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce916b53-3572-42da-bacf-51b61e2a3d9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.612613] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.824s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.615956] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.397s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.617896] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1242.618017] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1242.618209] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleting the datastore file [datastore2] d548ea75-9c1f-4884-b338-194f1b5d62ef {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1242.618758] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb79657f-5fae-4d8f-b4dc-318af885d7f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.627035] env[69927]: DEBUG oslo_vmware.api [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for the task: (returnval){ 
[ 1242.627035] env[69927]: value = "task-4096864" [ 1242.627035] env[69927]: _type = "Task" [ 1242.627035] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.644861] env[69927]: DEBUG oslo_vmware.api [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.648990] env[69927]: INFO nova.scheduler.client.report [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted allocations for instance 07814f60-1886-4b06-bcf7-e2c9b95a4501 [ 1242.733373] env[69927]: DEBUG nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1242.749530] env[69927]: DEBUG nova.compute.manager [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1242.749530] env[69927]: DEBUG nova.compute.manager [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing instance network info cache due to event network-changed-d201dadc-ab89-4ede-8c29-41217e3af341. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1242.749530] env[69927]: DEBUG oslo_concurrency.lockutils [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] Acquiring lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.749530] env[69927]: DEBUG oslo_concurrency.lockutils [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] Acquired lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.749530] env[69927]: DEBUG nova.network.neutron [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Refreshing network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1242.804898] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096859, 'name': ReconfigVM_Task, 'duration_secs': 0.661075} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.805538] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 853d85d4-e98f-4810-a8db-b2a820ebc071/853d85d4-e98f-4810-a8db-b2a820ebc071.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.806375] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5ed5775-0570-4fae-bcd9-766deb939e4d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.813748] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1242.813748] env[69927]: value = "task-4096865" [ 1242.813748] env[69927]: _type = "Task" [ 1242.813748] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.823565] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096865, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.977405] env[69927]: DEBUG oslo_vmware.api [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096860, 'name': ReconfigVM_Task, 'duration_secs': 0.795941} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.977689] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257/volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.982796] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1e542b6-dc2e-4bfa-b16f-d37bd7ebd5a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.005999] env[69927]: DEBUG oslo_vmware.api [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1243.005999] env[69927]: value = "task-4096866" [ 1243.005999] env[69927]: _type = "Task" [ 1243.005999] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.014260] env[69927]: DEBUG oslo_vmware.api [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096866, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.030801] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096861, 'name': CreateVM_Task, 'duration_secs': 0.605551} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.030801] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1243.030801] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.031018] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.031488] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1243.031860] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9ec3f2-da71-4cae-872d-4b0b1c91aa7c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.037445] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1243.037445] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5293758d-dc0d-7395-6e2f-bb76f2ed1378" [ 1243.037445] env[69927]: _type = "Task" [ 1243.037445] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.047972] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5293758d-dc0d-7395-6e2f-bb76f2ed1378, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.122118] env[69927]: INFO nova.compute.claims [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1243.137185] env[69927]: DEBUG oslo_vmware.api [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Task: {'id': task-4096864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171032} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.137498] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1243.137793] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1243.137915] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1243.138147] env[69927]: INFO nova.compute.manager [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1243.138395] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1243.138633] env[69927]: DEBUG nova.compute.manager [-] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1243.138766] env[69927]: DEBUG nova.network.neutron [-] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1243.161953] env[69927]: DEBUG oslo_concurrency.lockutils [None req-31ad6cba-0a32-4495-bf9e-42c25955fa3d tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "07814f60-1886-4b06-bcf7-e2c9b95a4501" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.409s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.257987] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.327352] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096865, 'name': Rename_Task, 'duration_secs': 0.153501} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.327623] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1243.327873] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6fbf852-13ce-4555-87ab-891d4fb6cdd3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.334404] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1243.334404] env[69927]: value = "task-4096867" [ 1243.334404] env[69927]: _type = "Task" [ 1243.334404] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.342977] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096867, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.474541] env[69927]: DEBUG nova.network.neutron [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updated VIF entry in instance network info cache for port d201dadc-ab89-4ede-8c29-41217e3af341. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1243.475087] env[69927]: DEBUG nova.network.neutron [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [{"id": "d201dadc-ab89-4ede-8c29-41217e3af341", "address": "fa:16:3e:6b:c9:4b", "network": {"id": "4a19c4d6-f840-486a-b113-3ec076b5b34d", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-437129762-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b7ae5270b0643e6b5720d4f2f765d74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd201dadc-ab", "ovs_interfaceid": "d201dadc-ab89-4ede-8c29-41217e3af341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.515325] env[69927]: DEBUG oslo_vmware.api [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096866, 'name': ReconfigVM_Task, 'duration_secs': 0.161715} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.515666] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811595', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'name': 'volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b007a697-7da4-4c97-9ccb-046d86b27568', 'attached_at': '', 'detached_at': '', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'serial': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1243.548066] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.548066] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Processing image 87cd0321-c9d5-427e-8af6-c3bd78649765 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1243.548066] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765/87cd0321-c9d5-427e-8af6-c3bd78649765.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.548297] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquired lock "[datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765/87cd0321-c9d5-427e-8af6-c3bd78649765.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.548355] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1243.548630] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61f62151-9b57-46a9-82b0-b878b3609d87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.559028] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1243.559028] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1243.559700] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ef7084a-0a3f-414d-a545-7fa7ec21a3ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.565966] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1243.565966] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ea30a7-9f6e-36cf-5577-a35aae9f1a43" [ 1243.565966] env[69927]: _type = "Task" [ 1243.565966] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.579091] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ea30a7-9f6e-36cf-5577-a35aae9f1a43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.628766] env[69927]: INFO nova.compute.resource_tracker [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating resource usage from migration c7013a21-99b2-4237-b87d-4f1ebfd104b5 [ 1243.841656] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fca10e-8fbd-4a34-bb1c-9a6c9fb0ec7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.848431] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096867, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.853732] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42eb18a-7cf2-4117-bc3c-9bb475d490c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.885269] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66533e4-a8c4-4c2c-93dc-42cdaa49e0e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.893545] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7bf39a-92ba-4371-961c-e6ec24723b4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.899252] env[69927]: DEBUG nova.network.neutron [-] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.910663] env[69927]: DEBUG nova.compute.provider_tree [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.978061] env[69927]: DEBUG oslo_concurrency.lockutils [req-b96252af-2fad-4432-9698-89d3193faf3c req-e48fccd2-e160-4a3e-8977-1d918130dc45 service nova] Releasing lock "refresh_cache-693a6c6b-8d1c-405e-bb17-73259e28f556" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.079406] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Preparing fetch location {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1244.079704] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Fetch image to [datastore2] OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be/OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be.vmdk {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1244.079928] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Downloading stream optimized image 87cd0321-c9d5-427e-8af6-c3bd78649765 to [datastore2] OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be/OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be.vmdk on the data store datastore2 as vApp {{(pid=69927) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1244.080273] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 
693a6c6b-8d1c-405e-bb17-73259e28f556] Downloading image file data 87cd0321-c9d5-427e-8af6-c3bd78649765 to the ESX as VM named 'OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be' {{(pid=69927) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1244.157389] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1244.157389] env[69927]: value = "resgroup-9" [ 1244.157389] env[69927]: _type = "ResourcePool" [ 1244.157389] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1244.157700] env[69927]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3d0854c2-27c8-4f57-9093-4712b2e3ec81 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.181406] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease: (returnval){ [ 1244.181406] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234f860-890a-5bfd-561f-7f6199731aba" [ 1244.181406] env[69927]: _type = "HttpNfcLease" [ 1244.181406] env[69927]: } obtained for vApp import into resource pool (val){ [ 1244.181406] env[69927]: value = "resgroup-9" [ 1244.181406] env[69927]: _type = "ResourcePool" [ 1244.181406] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1244.182125] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the lease: (returnval){ [ 1244.182125] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234f860-890a-5bfd-561f-7f6199731aba" [ 1244.182125] env[69927]: _type = "HttpNfcLease" [ 1244.182125] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1244.189718] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1244.189718] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234f860-890a-5bfd-561f-7f6199731aba" [ 1244.189718] env[69927]: _type = "HttpNfcLease" [ 1244.189718] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1244.345680] env[69927]: DEBUG oslo_vmware.api [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096867, 'name': PowerOnVM_Task, 'duration_secs': 0.88109} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.345950] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1244.346168] env[69927]: INFO nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Took 8.66 seconds to spawn the instance on the hypervisor. [ 1244.346349] env[69927]: DEBUG nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1244.347730] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5d9cc8-2ea0-40fe-82e5-25e73ceb1e2b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.367686] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.367908] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.413819] env[69927]: INFO nova.compute.manager [-] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Took 1.27 seconds to deallocate network for instance. 
[ 1244.414808] env[69927]: DEBUG nova.scheduler.client.report [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1244.558347] env[69927]: DEBUG nova.objects.instance [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lazy-loading 'flavor' on Instance uuid b007a697-7da4-4c97-9ccb-046d86b27568 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1244.690926] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1244.690926] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234f860-890a-5bfd-561f-7f6199731aba" [ 1244.690926] env[69927]: _type = "HttpNfcLease" [ 1244.690926] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1244.776271] env[69927]: DEBUG nova.compute.manager [req-4e809b90-0fb6-410d-b5e5-dffe55a114d6 req-46d9011a-3b1d-4857-a923-fb51eae07f02 service nova] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Received event network-vif-deleted-b92f830b-5eef-4260-a56b-94af4a4ec679 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.867332] env[69927]: INFO nova.compute.manager [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Took 19.21 seconds to build instance. [ 1244.870512] env[69927]: DEBUG nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1244.920723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.305s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.920938] env[69927]: INFO nova.compute.manager [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Migrating [ 1244.927147] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.669s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.928613] env[69927]: INFO nova.compute.claims [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1244.931645] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.062762] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8786197e-917d-4df3-9d12-fcb76b53ad82 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.817s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.192521] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1245.192521] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234f860-890a-5bfd-561f-7f6199731aba" [ 1245.192521] env[69927]: _type = "HttpNfcLease" [ 1245.192521] env[69927]: } is initializing. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1245.369763] env[69927]: DEBUG oslo_concurrency.lockutils [None req-eea2e180-697e-490d-a68c-94df0831b930 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.739s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.392485] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.439682] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.439976] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.440194] env[69927]: DEBUG nova.network.neutron [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.693055] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1245.693055] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234f860-890a-5bfd-561f-7f6199731aba" [ 1245.693055] env[69927]: _type = "HttpNfcLease" [ 1245.693055] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1245.693055] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1245.693055] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5234f860-890a-5bfd-561f-7f6199731aba" [ 1245.693055] env[69927]: _type = "HttpNfcLease" [ 1245.693055] env[69927]: }. 
{{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1245.693439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f983fc72-5eaa-4243-aa6e-c2e910f0f4ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.701062] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527e4bbe-560b-a8b0-80a4-b5715185cd6b/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1245.701062] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527e4bbe-560b-a8b0-80a4-b5715185cd6b/disk-0.vmdk. {{(pid=69927) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1245.763034] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-44e7a47f-cf78-4e35-b1a5-56bef4919f35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.804815] env[69927]: INFO nova.compute.manager [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Rescuing [ 1245.805772] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.806135] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.806383] env[69927]: DEBUG nova.network.neutron [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1246.176155] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adea1b6d-e7c5-44ec-8e97-56252792ef3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.188914] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ae2f13-c93e-49cf-b815-2b79e6ad7d50 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.230674] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b21641a-246e-4068-9f6c-37ccf9b335ca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.241462] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e469182a-5fcf-41c5-a2ae-22123f3b3413 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.260087] env[69927]: DEBUG nova.compute.provider_tree [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.308625] env[69927]: DEBUG nova.network.neutron [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [{"id": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "address": "fa:16:3e:df:fd:25", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e85edce-fa", "ovs_interfaceid": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.703056] env[69927]: DEBUG nova.network.neutron [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.766395] env[69927]: DEBUG nova.scheduler.client.report [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.811991] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.013081] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Completed reading data from the image iterator. {{(pid=69927) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1247.013481] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527e4bbe-560b-a8b0-80a4-b5715185cd6b/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1247.015182] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbeab354-9c0d-4cf3-953a-5ae92900e279 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.024972] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527e4bbe-560b-a8b0-80a4-b5715185cd6b/disk-0.vmdk is in state: ready. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1247.025261] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527e4bbe-560b-a8b0-80a4-b5715185cd6b/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1247.025510] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-77504737-efdc-498a-afb3-9a8d572e2507 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.206357] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.274789] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.274789] env[69927]: DEBUG nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1247.276600] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.345s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.276824] env[69927]: DEBUG nova.objects.instance [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lazy-loading 'resources' on Instance uuid d548ea75-9c1f-4884-b338-194f1b5d62ef {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.489528] env[69927]: DEBUG oslo_vmware.rw_handles [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527e4bbe-560b-a8b0-80a4-b5715185cd6b/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1247.489707] env[69927]: INFO nova.virt.vmwareapi.images [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Downloaded image file data 87cd0321-c9d5-427e-8af6-c3bd78649765 [ 1247.491117] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d50114d-241f-47f8-98b9-a85130bd5b2c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.512924] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.513240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.513435] env[69927]: DEBUG nova.compute.manager [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1247.514518] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1696f2f-6be7-455a-84ab-b5134c45ff98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.517413] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19029fc0-a225-4f94-98ea-1abafd3fd190 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.523942] env[69927]: DEBUG nova.compute.manager [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1247.524676] env[69927]: DEBUG nova.objects.instance [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lazy-loading 'flavor' on Instance uuid 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.658469] env[69927]: INFO nova.virt.vmwareapi.images [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 
693a6c6b-8d1c-405e-bb17-73259e28f556] The imported VM was unregistered [ 1247.661831] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Caching image {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1247.662538] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Creating directory with path [datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1247.662646] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4153302e-bfd4-4609-a602-e6d607dbae8c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.688888] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Created directory with path [datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1247.689109] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be/OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be.vmdk to [datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765/87cd0321-c9d5-427e-8af6-c3bd78649765.vmdk. {{(pid=69927) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1247.689445] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-9161957b-3ead-4192-9463-95d7f5cd58c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.697765] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1247.697765] env[69927]: value = "task-4096870" [ 1247.697765] env[69927]: _type = "Task" [ 1247.697765] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.707991] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.783891] env[69927]: DEBUG nova.compute.utils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1247.786061] env[69927]: DEBUG nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1247.786061] env[69927]: DEBUG nova.network.neutron [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1247.844093] env[69927]: DEBUG nova.policy [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcad782c7af5411d87af2f0bece7542e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf75fa4b8ec6436999f00b7cb4b57e24', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1248.023199] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426cf101-f2a2-4668-a5b1-d315a9160c30 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.034866] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c88859-695e-4df3-8282-a46642199965 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.071036] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798f57f5-0ba2-4174-a17d-043de863e325 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.078932] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef796cfe-6cdb-4f58-af41-2940a02eec85 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.097503] env[69927]: DEBUG nova.compute.provider_tree [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.209685] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 
tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.244612] env[69927]: DEBUG nova.network.neutron [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Successfully created port: 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1248.289533] env[69927]: DEBUG nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1248.333466] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2a3a1c-781f-4c5b-bf3f-5a578b47cb66 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.357033] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance '8b70b479-4a54-4bcb-813d-16cc0c9a67c5' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1248.541689] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1248.541689] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66af431e-387c-41db-841b-9794494bec6b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.558724] env[69927]: DEBUG oslo_vmware.api [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1248.558724] env[69927]: value = "task-4096871" [ 1248.558724] env[69927]: _type = "Task" [ 1248.558724] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.571539] env[69927]: DEBUG oslo_vmware.api [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096871, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.601539] env[69927]: DEBUG nova.scheduler.client.report [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.711922] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.750165] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1248.750526] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f68ea36d-3853-4d89-acab-000d8f7481e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.760155] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1248.760155] env[69927]: value = "task-4096872" [ 1248.760155] env[69927]: _type = "Task" [ 1248.760155] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.772223] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096872, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.864115] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1248.864882] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84c0a87c-a383-4896-b0ba-2fb283bd061b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.873556] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1248.873556] env[69927]: value = "task-4096873" [ 1248.873556] env[69927]: _type = "Task" [ 1248.873556] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.884482] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.070019] env[69927]: DEBUG oslo_vmware.api [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096871, 'name': PowerOffVM_Task, 'duration_secs': 0.49376} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.070382] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1249.070599] env[69927]: DEBUG nova.compute.manager [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1249.071538] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c120ebfc-7cd4-42e8-9d1f-8b484ec5ae8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.108022] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.111629] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.719s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.113329] env[69927]: INFO nova.compute.claims [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1249.137740] env[69927]: INFO nova.scheduler.client.report [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Deleted allocations for instance d548ea75-9c1f-4884-b338-194f1b5d62ef [ 1249.213573] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.273873] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096872, 'name': PowerOffVM_Task, 'duration_secs': 0.505932} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.273873] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1249.274858] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a2805e-9e9c-462b-a254-874bd450aa69 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.299442] env[69927]: DEBUG nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1249.303561] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce2244c-2925-400f-8187-dde4be2f16fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.331049] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1249.331049] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.331332] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1249.331332] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.331409] env[69927]: DEBUG nova.virt.hardware [None 
req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1249.331561] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1249.331793] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1249.331946] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1249.332131] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1249.332300] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1249.332477] env[69927]: DEBUG nova.virt.hardware [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1249.334743] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b661c91-cfa8-4f9d-ad6d-d760963a5a77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.345903] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.347649] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf615613-a7c8-402d-b65b-d9eda3bdf8e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.352729] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0ef03bd-1858-4644-a2b8-c07888b7ff79 
{{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.368053] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1249.368053] env[69927]: value = "task-4096874" [ 1249.368053] env[69927]: _type = "Task" [ 1249.368053] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.381881] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1249.381881] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1249.382129] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.382298] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.382488] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.382787] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94f95952-06fa-42f5-9abf-1f6b8adcb254 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.390815] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096873, 'name': PowerOffVM_Task, 'duration_secs': 0.343655} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.391107] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1249.391298] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance '8b70b479-4a54-4bcb-813d-16cc0c9a67c5' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1249.402675] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.403878] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1249.404913] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c7927f4-232f-4a13-8488-ad48ba232f02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.415591] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1249.415591] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b5fcb-2be7-8dbb-6893-c65dcbc84242" [ 1249.415591] env[69927]: _type = "Task" [ 1249.415591] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.429540] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b5fcb-2be7-8dbb-6893-c65dcbc84242, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.590071] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ffd87009-f279-411e-b0a9-9f993ed0dfca tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.076s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.647968] env[69927]: DEBUG oslo_concurrency.lockutils [None req-a67978b0-2e72-409e-9205-f99f025ea759 tempest-AttachInterfacesTestJSON-901858583 tempest-AttachInterfacesTestJSON-901858583-project-member] Lock "d548ea75-9c1f-4884-b338-194f1b5d62ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.687s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.710928] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.900365] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1249.900365] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.900365] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1249.900365] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.901758] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1249.902182] 
env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1249.904315] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1249.904315] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1249.904315] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1249.904315] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1249.904315] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1249.914265] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35fe21d6-bfc1-4bc4-be18-7a08b1c06f57 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.928784] env[69927]: DEBUG nova.compute.manager [req-194509e7-fb06-482f-b96a-9e3a9f9d47fe req-f32265e1-6dd6-4d5d-ad8d-b9bd7028b882 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received event network-vif-plugged-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1249.928917] env[69927]: DEBUG oslo_concurrency.lockutils [req-194509e7-fb06-482f-b96a-9e3a9f9d47fe req-f32265e1-6dd6-4d5d-ad8d-b9bd7028b882 service nova] Acquiring lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.929334] env[69927]: DEBUG oslo_concurrency.lockutils [req-194509e7-fb06-482f-b96a-9e3a9f9d47fe req-f32265e1-6dd6-4d5d-ad8d-b9bd7028b882 service nova] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1249.930901] env[69927]: DEBUG oslo_concurrency.lockutils [req-194509e7-fb06-482f-b96a-9e3a9f9d47fe req-f32265e1-6dd6-4d5d-ad8d-b9bd7028b882 service nova] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.931149] env[69927]: DEBUG nova.compute.manager [req-194509e7-fb06-482f-b96a-9e3a9f9d47fe req-f32265e1-6dd6-4d5d-ad8d-b9bd7028b882 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] No waiting events found dispatching network-vif-plugged-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1249.931609] env[69927]: WARNING nova.compute.manager [req-194509e7-fb06-482f-b96a-9e3a9f9d47fe req-f32265e1-6dd6-4d5d-ad8d-b9bd7028b882 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received unexpected event network-vif-plugged-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 for instance with vm_state building and task_state spawning. [ 1249.948037] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]525b5fcb-2be7-8dbb-6893-c65dcbc84242, 'name': SearchDatastore_Task, 'duration_secs': 0.078951} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.948807] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1249.948807] env[69927]: value = "task-4096875" [ 1249.948807] env[69927]: _type = "Task" [ 1249.948807] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.949057] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-077ad9c8-ac80-4380-9246-5fbbcdc79015 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.960784] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1249.960784] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52666a64-2bb6-dca1-c0b6-45299651b338" [ 1249.960784] env[69927]: _type = "Task" [ 1249.960784] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.964764] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096875, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.976421] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52666a64-2bb6-dca1-c0b6-45299651b338, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.017390] env[69927]: DEBUG nova.network.neutron [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Successfully updated port: 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1250.094442] env[69927]: DEBUG nova.objects.instance [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lazy-loading 'flavor' on Instance uuid 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.214296] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.339314] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c52dd09-4b60-45c3-a8ff-57129e519098 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.348366] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38d1252-f67a-44c8-8c82-be7f5a6e3940 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.382831] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1501b628-8f69-4f5f-b2da-18aeb5a99105 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.391587] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2720482c-d515-4a03-8f33-1ecc8949b2ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.406565] env[69927]: DEBUG nova.compute.provider_tree [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.462469] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096875, 'name': ReconfigVM_Task, 'duration_secs': 0.32548} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.462665] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance '8b70b479-4a54-4bcb-813d-16cc0c9a67c5' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1250.476753] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52666a64-2bb6-dca1-c0b6-45299651b338, 'name': SearchDatastore_Task, 'duration_secs': 0.091367} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.480066] env[69927]: DEBUG oslo_concurrency.lockutils [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.480066] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. {{(pid=69927) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1250.480066] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43ac1dd3-57cd-4864-9665-2e9000aba75a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.485492] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1250.485492] env[69927]: value = "task-4096876" [ 1250.485492] env[69927]: _type = "Task" [ 1250.485492] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.495398] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096876, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.523880] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.524256] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquired lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.524256] env[69927]: DEBUG nova.network.neutron [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1250.600711] env[69927]: DEBUG oslo_concurrency.lockutils [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.601031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquired lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.601128] env[69927]: DEBUG nova.network.neutron [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1250.601281] env[69927]: DEBUG nova.objects.instance [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lazy-loading 'info_cache' on Instance uuid 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.712742] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.909664] env[69927]: DEBUG nova.scheduler.client.report [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1250.969689] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1250.970054] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.970166] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1250.970354] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.970711] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.970711] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1250.970894] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1250.971073] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1250.971247] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1250.971411] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1250.971592] env[69927]: DEBUG nova.virt.hardware [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1250.978174] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1250.978942] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f5c4156-ddc9-453d-bd52-9718f926a850 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.005271] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096876, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.007339] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1251.007339] env[69927]: value = "task-4096877" [ 1251.007339] env[69927]: _type = "Task" [ 1251.007339] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.020107] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096877, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.086915] env[69927]: DEBUG nova.network.neutron [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1251.105681] env[69927]: DEBUG nova.objects.base [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Object Instance<80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d> lazy-loaded attributes: flavor,info_cache {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1251.213530] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096870, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.04365} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.213851] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be/OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be.vmdk to [datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765/87cd0321-c9d5-427e-8af6-c3bd78649765.vmdk. [ 1251.214066] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Cleaning up location [datastore2] OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1251.214230] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_8b250710-3990-49c5-b9cd-4c81786968be {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1251.214496] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16efb22f-0d7c-40c7-a18b-9445c6f0ce53 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.222105] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1251.222105] env[69927]: value = "task-4096878" [ 1251.222105] env[69927]: _type = "Task" [ 1251.222105] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.231384] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.406327] env[69927]: DEBUG nova.network.neutron [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.414928] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.415461] env[69927]: DEBUG nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1251.503121] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665945} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.503395] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. [ 1251.504201] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b428d4ec-e85c-445c-aee7-b013ea8381e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.536069] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1251.536069] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62aa3b77-2417-4c73-a41f-e990718c566e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.550884] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096877, 'name': ReconfigVM_Task, 'duration_secs': 0.268711} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.551860] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1251.552417] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f594ae-3591-43d2-84ab-a9f3d7e7c547 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.556552] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1251.556552] env[69927]: value = "task-4096879" [ 1251.556552] env[69927]: _type = "Task" [ 1251.556552] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.581476] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1251.582352] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c949af9-d5eb-448e-8277-eec416b7229b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.600709] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096879, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.605409] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1251.605409] env[69927]: value = "task-4096880" [ 1251.605409] env[69927]: _type = "Task" [ 1251.605409] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.618622] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096880, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.736019] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039804} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.736318] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1251.736497] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Releasing lock "[datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765/87cd0321-c9d5-427e-8af6-c3bd78649765.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.736757] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765/87cd0321-c9d5-427e-8af6-c3bd78649765.vmdk to [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1251.737056] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18a2fa18-9eef-41ea-a921-9cd20385c2bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.746628] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1251.746628] env[69927]: value = "task-4096881" [ 1251.746628] env[69927]: _type = "Task" [ 1251.746628] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.757088] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096881, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.908953] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Releasing lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.909425] env[69927]: DEBUG nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Instance network_info: |[{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1251.909959] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:7a:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.920065] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Creating folder: Project (bf75fa4b8ec6436999f00b7cb4b57e24). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1251.921737] env[69927]: DEBUG nova.compute.utils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1251.923429] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-011dbff1-babd-4033-a226-227b3731c039 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.926341] env[69927]: DEBUG nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1251.926599] env[69927]: DEBUG nova.network.neutron [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1251.948090] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Created folder: Project (bf75fa4b8ec6436999f00b7cb4b57e24) in parent group-v811283. [ 1251.948359] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Creating folder: Instances. Parent ref: group-v811599. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1251.949038] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-499d2f12-af62-4b27-8eb6-3425cbe9a19c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.965650] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Created folder: Instances in parent group-v811599. [ 1251.965650] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1251.965650] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1251.966112] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9a47aeb-607b-4b2a-9634-b3946760db7c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.988216] env[69927]: DEBUG nova.compute.manager [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1251.988429] env[69927]: DEBUG nova.compute.manager [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing instance network info cache due to event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1251.988645] env[69927]: DEBUG oslo_concurrency.lockutils [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] Acquiring lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.989300] env[69927]: DEBUG oslo_concurrency.lockutils [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] Acquired lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.989300] env[69927]: DEBUG nova.network.neutron [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1251.991673] env[69927]: DEBUG nova.policy [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76414b2ae1aa4ab582c2b59fd4218005', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544f2a021144492ba1aea46ce6075e53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1251.999233] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1251.999233] env[69927]: value = "task-4096884" [ 1251.999233] env[69927]: _type = "Task" [ 1251.999233] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.011134] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096884, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.070254] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096879, 'name': ReconfigVM_Task, 'duration_secs': 0.38592} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.070649] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfigured VM instance instance-0000006b to attach disk [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1252.071582] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712de649-9e53-421a-b40d-00ea817e7419 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.105858] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91e232d4-b7e9-44ac-9376-1d010545d65d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.127761] env[69927]: DEBUG nova.network.neutron [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Updating instance_info_cache with network_info: [{"id": "571ec5f9-628f-4a79-8f19-13c41eb94377", "address": "fa:16:3e:37:97:cb", "network": {"id": "8b69632f-2333-4f76-bdfe-a301ba92d3b7", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1292205702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2750a80142e4dc1b12a6caf543768ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571ec5f9-62", "ovs_interfaceid": "571ec5f9-628f-4a79-8f19-13c41eb94377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.140058] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096880, 'name': ReconfigVM_Task, 'duration_secs': 0.401253} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.140058] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1252.140058] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance '8b70b479-4a54-4bcb-813d-16cc0c9a67c5' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1252.141374] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1252.141374] env[69927]: value = "task-4096885" [ 1252.141374] env[69927]: _type = "Task" [ 1252.141374] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.153989] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096885, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.261502] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096881, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.390984] env[69927]: DEBUG nova.network.neutron [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Successfully created port: 1e9afa83-f755-4605-8818-5470b4d2bd50 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1252.429412] env[69927]: DEBUG nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1252.512871] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096884, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.636869] env[69927]: DEBUG oslo_concurrency.lockutils [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Releasing lock "refresh_cache-80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.647652] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e414284-440c-453d-91c3-a973dc75f8d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.676642] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096885, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.677526] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5fd097-b7ad-4819-b291-00e8f71a923a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.701123] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance '8b70b479-4a54-4bcb-813d-16cc0c9a67c5' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1252.760655] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096881, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.012224] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096884, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.049792] env[69927]: DEBUG nova.network.neutron [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updated VIF entry in instance network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1253.049916] env[69927]: DEBUG nova.network.neutron [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.154780] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096885, 'name': ReconfigVM_Task, 'duration_secs': 0.756257} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.156331] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1253.156331] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a111c70-474a-4e7a-87fe-f238dcc7241d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.163629] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1253.163629] env[69927]: value = "task-4096886" [ 1253.163629] env[69927]: _type = "Task" [ 1253.163629] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.176926] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096886, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.259974] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096881, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.440111] env[69927]: DEBUG nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1253.472324] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1253.472599] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1253.472831] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1253.473049] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1253.473202] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1253.473350] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1253.473626] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 
tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1253.473883] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1253.474189] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1253.474348] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1253.474845] env[69927]: DEBUG nova.virt.hardware [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1253.475722] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1900ffb4-5a4e-4623-aad1-7c60f373f138 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.486466] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21761097-2ffb-4732-9613-e5754564db9c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.511483] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096884, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.556766] env[69927]: DEBUG oslo_concurrency.lockutils [req-7c713783-c77b-440e-9a89-27763f64f969 req-09e8d9a8-8913-4304-9514-2bb97bec8737 service nova] Releasing lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.642849] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1253.643226] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7f3b608-5f88-41ab-803b-828a4d90cbf1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.652901] env[69927]: DEBUG oslo_vmware.api [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1253.652901] env[69927]: value = "task-4096887" [ 1253.652901] env[69927]: _type = "Task" [ 1253.652901] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.664671] env[69927]: DEBUG oslo_vmware.api [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.680277] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096886, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.759700] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096881, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.882204] env[69927]: DEBUG nova.compute.manager [req-76f300d7-ada2-4ea0-b4ea-d85224c4df4b req-2140b4b7-6ad4-422b-864a-d16fa5d6ba79 service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Received event network-vif-plugged-1e9afa83-f755-4605-8818-5470b4d2bd50 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1253.882486] env[69927]: DEBUG oslo_concurrency.lockutils [req-76f300d7-ada2-4ea0-b4ea-d85224c4df4b req-2140b4b7-6ad4-422b-864a-d16fa5d6ba79 service nova] Acquiring lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.882899] env[69927]: DEBUG oslo_concurrency.lockutils [req-76f300d7-ada2-4ea0-b4ea-d85224c4df4b req-2140b4b7-6ad4-422b-864a-d16fa5d6ba79 service nova] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.883121] env[69927]: DEBUG oslo_concurrency.lockutils [req-76f300d7-ada2-4ea0-b4ea-d85224c4df4b req-2140b4b7-6ad4-422b-864a-d16fa5d6ba79 service nova] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.883318] env[69927]: DEBUG nova.compute.manager [req-76f300d7-ada2-4ea0-b4ea-d85224c4df4b req-2140b4b7-6ad4-422b-864a-d16fa5d6ba79 service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] No waiting events found dispatching network-vif-plugged-1e9afa83-f755-4605-8818-5470b4d2bd50 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1253.883765] env[69927]: WARNING nova.compute.manager [req-76f300d7-ada2-4ea0-b4ea-d85224c4df4b req-2140b4b7-6ad4-422b-864a-d16fa5d6ba79 service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Received unexpected event network-vif-plugged-1e9afa83-f755-4605-8818-5470b4d2bd50 for instance with vm_state building and task_state spawning. [ 1254.002261] env[69927]: DEBUG nova.network.neutron [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Successfully updated port: 1e9afa83-f755-4605-8818-5470b4d2bd50 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1254.015607] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096884, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.166036] env[69927]: DEBUG oslo_vmware.api [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096887, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.175999] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096886, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.261597] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096881, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.401016] env[69927]: DEBUG nova.network.neutron [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Port 3e85edce-fa8a-45d4-b109-5bdd98a06303 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1254.505205] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "refresh_cache-923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.505508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "refresh_cache-923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.505508] env[69927]: DEBUG nova.network.neutron [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1254.517679] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096884, 'name': CreateVM_Task, 'duration_secs': 2.261016} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.518832] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1254.520076] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.520076] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.520237] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1254.521016] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06bc26d6-2898-4956-b841-a1abcc008f45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.527066] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1254.527066] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a439c2-0e26-e404-8677-b5324dd7d21d" [ 1254.527066] env[69927]: _type = "Task" [ 1254.527066] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.536868] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a439c2-0e26-e404-8677-b5324dd7d21d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.663217] env[69927]: DEBUG oslo_vmware.api [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096887, 'name': PowerOnVM_Task, 'duration_secs': 0.760677} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.663472] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1254.663671] env[69927]: DEBUG nova.compute.manager [None req-93b8f3f7-2e70-4226-ae68-580f0bfa48b8 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1254.664471] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edd5cba-b23b-4da7-8e01-40aa5a9f4dd8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.680035] env[69927]: DEBUG oslo_vmware.api [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096886, 'name': PowerOnVM_Task, 'duration_secs': 1.230491} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.680035] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1254.682249] env[69927]: DEBUG nova.compute.manager [None req-38540e2e-6dd5-4098-b5a5-ffd741995c05 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1254.683010] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59de3938-6373-45e1-b55a-4244c67c7686 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.762264] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096881, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.537554} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.762481] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/87cd0321-c9d5-427e-8af6-c3bd78649765/87cd0321-c9d5-427e-8af6-c3bd78649765.vmdk to [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1254.763217] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f87254-5468-47c3-8161-b1bbe3ad9b63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.787817] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1254.788192] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dab24171-9d2a-43b6-8de2-65fb05561aa3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.810457] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1254.810457] env[69927]: value = "task-4096888" [ 1254.810457] env[69927]: _type = "Task" [ 1254.810457] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.819577] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096888, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.988868] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.989155] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.989427] env[69927]: INFO nova.compute.manager [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Rebooting instance [ 1255.038604] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a439c2-0e26-e404-8677-b5324dd7d21d, 'name': SearchDatastore_Task, 'duration_secs': 0.011663} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.038951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.039220] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1255.039489] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.039705] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.039921] env[69927]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1255.040207] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fe03ac0-b93f-4cf8-acc5-12bfa7d211d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.043283] env[69927]: DEBUG nova.network.neutron [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1255.053217] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1255.053386] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1255.054162] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd463065-1c9f-4aa5-80e7-f5bbebc6c395 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.063051] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1255.063051] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52254bbc-b188-58b5-5bd9-77c787c9e73b" [ 1255.063051] env[69927]: _type = "Task" [ 1255.063051] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.074028] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52254bbc-b188-58b5-5bd9-77c787c9e73b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.198105] env[69927]: DEBUG nova.network.neutron [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Updating instance_info_cache with network_info: [{"id": "1e9afa83-f755-4605-8818-5470b4d2bd50", "address": "fa:16:3e:9c:c3:5a", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e9afa83-f7", "ovs_interfaceid": "1e9afa83-f755-4605-8818-5470b4d2bd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.321419] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096888, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.425062] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.425343] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.425542] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.521159] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.521159] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.521542] env[69927]: DEBUG nova.network.neutron [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1255.576287] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52254bbc-b188-58b5-5bd9-77c787c9e73b, 'name': SearchDatastore_Task, 'duration_secs': 0.053303} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.577184] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dae9f4b-f5cc-4dce-bd20-bff95317eb86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.582830] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1255.582830] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241e62e-2a73-f2ad-053c-c88172a34375" [ 1255.582830] env[69927]: _type = "Task" [ 1255.582830] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.591281] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241e62e-2a73-f2ad-053c-c88172a34375, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.683582] env[69927]: INFO nova.compute.manager [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Unrescuing [ 1255.683582] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.683582] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquired lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.683582] env[69927]: DEBUG nova.network.neutron [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1255.700873] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "refresh_cache-923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.701200] env[69927]: DEBUG nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Instance network_info: |[{"id": "1e9afa83-f755-4605-8818-5470b4d2bd50", "address": "fa:16:3e:9c:c3:5a", "network": {"id": 
"5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e9afa83-f7", "ovs_interfaceid": "1e9afa83-f755-4605-8818-5470b4d2bd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1255.701859] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:c3:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e9afa83-f755-4605-8818-5470b4d2bd50', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.715126] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1255.716472] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1255.716953] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50ed8207-4fe1-4108-9ece-15d21123e1fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.747442] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1255.747442] env[69927]: value = "task-4096889" [ 1255.747442] env[69927]: _type = "Task" [ 1255.747442] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.758511] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096889, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.822436] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096888, 'name': ReconfigVM_Task, 'duration_secs': 0.921964} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.822831] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556/693a6c6b-8d1c-405e-bb17-73259e28f556.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1255.823570] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e8965b0-73ba-4f25-8445-fb4110cdaa09 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.831898] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1255.831898] env[69927]: value = "task-4096890" [ 1255.831898] env[69927]: _type = "Task" [ 1255.831898] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.843027] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096890, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.912275] env[69927]: DEBUG nova.compute.manager [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Received event network-changed-1e9afa83-f755-4605-8818-5470b4d2bd50 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1255.912527] env[69927]: DEBUG nova.compute.manager [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Refreshing instance network info cache due to event network-changed-1e9afa83-f755-4605-8818-5470b4d2bd50. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1255.912704] env[69927]: DEBUG oslo_concurrency.lockutils [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] Acquiring lock "refresh_cache-923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.912850] env[69927]: DEBUG oslo_concurrency.lockutils [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] Acquired lock "refresh_cache-923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.913036] env[69927]: DEBUG nova.network.neutron [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Refreshing network info cache for port 1e9afa83-f755-4605-8818-5470b4d2bd50 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1256.094677] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5241e62e-2a73-f2ad-053c-c88172a34375, 'name': SearchDatastore_Task, 'duration_secs': 0.022527} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.095137] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.095299] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1256.095643] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f41e3f2-be77-4f9e-a6ce-08cfb103937a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.106467] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1256.106467] env[69927]: value = "task-4096891" [ 1256.106467] env[69927]: _type = "Task" [ 1256.106467] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.116208] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096891, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.259414] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096889, 'name': CreateVM_Task, 'duration_secs': 0.355804} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.259670] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1256.260420] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.260581] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.260957] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1256.261249] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf126d85-d355-4617-b25a-f96005c44a5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.264269] env[69927]: DEBUG nova.network.neutron [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.270056] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1256.270056] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c99794-89b7-42d3-1719-3b0d512c0aa7" [ 1256.270056] env[69927]: _type = "Task" [ 1256.270056] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.284611] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c99794-89b7-42d3-1719-3b0d512c0aa7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.344115] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096890, 'name': Rename_Task, 'duration_secs': 0.182253} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.344480] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1256.344765] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-128f7f3f-2892-40df-8dbc-e0b64f95840e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.357477] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1256.357477] env[69927]: value = "task-4096892" [ 1256.357477] env[69927]: _type = "Task" [ 1256.357477] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.366788] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096892, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.478658] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.478902] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.479504] env[69927]: DEBUG nova.network.neutron [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1256.617022] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496144} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.617411] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1256.617511] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1256.617777] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4904942-c5b1-491c-8997-d995bca4016e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.625572] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1256.625572] env[69927]: value = "task-4096893" [ 1256.625572] env[69927]: _type = "Task" [ 1256.625572] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.636247] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096893, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.648028] env[69927]: DEBUG nova.network.neutron [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.670942] env[69927]: DEBUG nova.network.neutron [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Updated VIF entry in instance network info cache for port 1e9afa83-f755-4605-8818-5470b4d2bd50. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1256.671334] env[69927]: DEBUG nova.network.neutron [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Updating instance_info_cache with network_info: [{"id": "1e9afa83-f755-4605-8818-5470b4d2bd50", "address": "fa:16:3e:9c:c3:5a", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e9afa83-f7", "ovs_interfaceid": "1e9afa83-f755-4605-8818-5470b4d2bd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.767980] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.787390] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52c99794-89b7-42d3-1719-3b0d512c0aa7, 'name': SearchDatastore_Task, 'duration_secs': 0.023597} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.787685] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.787919] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1256.788175] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.788324] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.788503] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1256.788773] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e24b133-2867-4447-9d04-c887575d8f8f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.800347] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1256.800547] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1256.801936] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-311fe889-fc70-450d-9e44-3a186493e912 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.807024] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1256.807024] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52781ead-8ce2-32da-ba12-1ffada79112a" [ 1256.807024] env[69927]: _type = "Task" [ 1256.807024] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.815421] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52781ead-8ce2-32da-ba12-1ffada79112a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.868537] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096892, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.136677] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070103} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.139793] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1257.140770] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d41f67f-f4b3-44c8-8870-3a77141aa86e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.157043] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Releasing lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.157691] env[69927]: DEBUG nova.objects.instance [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lazy-loading 'flavor' on Instance uuid b007a697-7da4-4c97-9ccb-046d86b27568 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.167528] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1257.168252] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2675d24f-faeb-40da-9669-8b80153de249 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.186511] env[69927]: DEBUG oslo_concurrency.lockutils [req-3e8ba0d4-5f85-4fe7-90ca-b534faa7e1e1 req-f1cb5cbe-a2c2-47c3-af3b-3dd26a08e60a service nova] Releasing lock "refresh_cache-923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.194190] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1257.194190] env[69927]: value = "task-4096894" [ 1257.194190] env[69927]: _type = "Task" [ 1257.194190] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.204804] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096894, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.223038] env[69927]: DEBUG nova.network.neutron [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [{"id": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "address": "fa:16:3e:df:fd:25", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e85edce-fa", "ovs_interfaceid": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.273430] env[69927]: DEBUG nova.compute.manager [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1257.273993] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a1f82f-abb9-454c-875c-ce8328422bde {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.317370] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52781ead-8ce2-32da-ba12-1ffada79112a, 'name': SearchDatastore_Task, 'duration_secs': 0.029163} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.318776] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab7e89f2-a713-4880-874f-73498efc52aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.326166] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1257.326166] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a4c43f-aab0-55e8-58fe-137431a7c1ae" [ 1257.326166] env[69927]: _type = "Task" [ 1257.326166] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.335032] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a4c43f-aab0-55e8-58fe-137431a7c1ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.369775] env[69927]: DEBUG oslo_vmware.api [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096892, 'name': PowerOnVM_Task, 'duration_secs': 0.855505} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.370194] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1257.472063] env[69927]: DEBUG nova.compute.manager [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1257.473036] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deab750-5302-467d-914a-87906dec5cc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.672474] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9b4b01-020b-44ab-a703-f9f5396390d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.697962] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1257.701326] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0bca114-5f47-488a-beb7-76cf46697d4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.708816] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096894, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.710030] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1257.710030] env[69927]: value = "task-4096895" [ 1257.710030] env[69927]: _type = "Task" [ 1257.710030] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.718691] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096895, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.726489] env[69927]: DEBUG oslo_concurrency.lockutils [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.838279] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a4c43f-aab0-55e8-58fe-137431a7c1ae, 'name': SearchDatastore_Task, 'duration_secs': 0.035914} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.838640] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.838960] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19/923b70fc-9959-48cf-8a9f-f8cd7c0c6b19.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1257.839266] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9e39045-ca28-4c68-ad17-fae55f406c0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.846520] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1257.846520] env[69927]: value = "task-4096896" [ 1257.846520] env[69927]: _type = "Task" [ 1257.846520] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.855031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "853d85d4-e98f-4810-a8db-b2a820ebc071" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.855309] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.855535] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "853d85d4-e98f-4810-a8db-b2a820ebc071-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.855723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.855907] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.857952] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096896, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.859052] env[69927]: INFO nova.compute.manager [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Terminating instance [ 1257.991706] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4edb8a37-ed13-4fba-b129-994714adb2a4 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.449s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.211614] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096894, 'name': ReconfigVM_Task, 'duration_secs': 0.701811} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.215802] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1258.216675] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ad0f3ba-c576-4a64-9916-a0ffe5a8261e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.224859] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096895, 'name': PowerOffVM_Task, 'duration_secs': 0.311087} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.226371] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1258.232060] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfiguring VM instance instance-0000006b to detach disk 2002 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1258.232549] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1258.232549] env[69927]: value = "task-4096897" [ 1258.232549] env[69927]: _type = "Task" [ 1258.232549] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.235903] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50be83e7-9857-411f-8b97-54811caa68d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.256011] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6e045e-e7b2-49a2-9743-cb3692af49bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.267677] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84af05f-08b9-4d03-af96-2ec361683491 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.272053] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096897, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.273174] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1258.273174] env[69927]: value = "task-4096898" [ 1258.273174] env[69927]: _type = "Task" [ 1258.273174] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.285749] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.295795] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d4eeb4-9771-467d-9a5a-393415b9ef93 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.304537] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Doing hard reboot of VM {{(pid=69927) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1258.304846] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-3277df65-d66a-46a3-b70a-74181f873d50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.316522] env[69927]: DEBUG oslo_vmware.api [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1258.316522] env[69927]: value = "task-4096899" [ 1258.316522] env[69927]: _type = "Task" [ 1258.316522] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.328711] env[69927]: DEBUG oslo_vmware.api [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096899, 'name': ResetVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.358274] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096896, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.363292] env[69927]: DEBUG nova.compute.manager [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1258.363552] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1258.364549] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54f04ab-edb1-487f-8f2a-083d36ba93e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.373857] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1258.374212] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-498ab5e0-3f9c-4124-ae6c-273ee0e132e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.382202] env[69927]: DEBUG oslo_vmware.api [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1258.382202] env[69927]: value = "task-4096900" [ 1258.382202] env[69927]: _type = "Task" [ 1258.382202] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.394762] env[69927]: DEBUG oslo_vmware.api [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.760053] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096897, 'name': Rename_Task, 'duration_secs': 0.234889} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.760312] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1258.760492] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0449487b-6a94-4c23-a600-c5be0d6a9cf7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.769136] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1258.769136] env[69927]: value = "task-4096901" [ 1258.769136] env[69927]: _type = "Task" [ 1258.769136] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.780709] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.786240] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096898, 'name': ReconfigVM_Task, 'duration_secs': 0.345978} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.786527] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfigured VM instance instance-0000006b to detach disk 2002 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1258.787309] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1258.787309] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44eca22b-6ac3-44fa-845e-309ca0f7b1fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.802053] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1258.802053] env[69927]: value = "task-4096902" [ 1258.802053] env[69927]: _type = "Task" [ 1258.802053] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.811795] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.828096] env[69927]: DEBUG oslo_vmware.api [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096899, 'name': ResetVM_Task, 'duration_secs': 0.11066} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.828565] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Did hard reboot of VM {{(pid=69927) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1258.828893] env[69927]: DEBUG nova.compute.manager [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1258.829675] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87449235-0359-4c75-a3e7-0cb161fec29f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.859172] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096896, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520758} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.859584] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19/923b70fc-9959-48cf-8a9f-f8cd7c0c6b19.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1258.859777] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1258.860021] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb7658a9-10f1-44b8-b98d-4c5f8f87cafe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.869143] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1258.869143] env[69927]: value = "task-4096903" [ 1258.869143] env[69927]: _type = "Task" [ 1258.869143] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.878320] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096903, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.892341] env[69927]: DEBUG oslo_vmware.api [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096900, 'name': PowerOffVM_Task, 'duration_secs': 0.338589} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.892603] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1258.892776] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1258.893027] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff3a8c50-7391-4fa0-bf6e-e0989363f977 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.989517] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1258.989789] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1258.989977] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleting the datastore file [datastore1] 853d85d4-e98f-4810-a8db-b2a820ebc071 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1258.990272] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b172bc4a-6840-4ce6-a22a-e119ca1a1ddd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.999855] env[69927]: DEBUG oslo_vmware.api [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1258.999855] env[69927]: value = "task-4096905" [ 1258.999855] env[69927]: _type = "Task" [ 1258.999855] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.010104] env[69927]: DEBUG oslo_vmware.api [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096905, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.283792] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096901, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.312855] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096902, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.343409] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7c985137-7aa6-472e-99e0-fb06afc1f3e9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.354s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.380123] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.231038} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.380462] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1259.381264] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749db662-119a-4cae-973c-3faa05de18f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.406179] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19/923b70fc-9959-48cf-8a9f-f8cd7c0c6b19.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1259.406566] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-489dca6f-cd0e-40ca-ba62-a95d6408bded {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.431967] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1259.431967] env[69927]: value = "task-4096906" [ 1259.431967] env[69927]: _type = "Task" [ 1259.431967] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.445114] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096906, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.513641] env[69927]: DEBUG oslo_vmware.api [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.406716} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.514182] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1259.514336] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1259.514534] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1259.514985] env[69927]: INFO nova.compute.manager [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1259.514985] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1259.515228] env[69927]: DEBUG nova.compute.manager [-] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1259.515326] env[69927]: DEBUG nova.network.neutron [-] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1259.531462] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92f6efc-af98-45d2-ba09-e82655e87dfa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.555375] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0186cc79-ec27-4068-8609-ffc79350a26b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.565748] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance '8b70b479-4a54-4bcb-813d-16cc0c9a67c5' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1259.781811] env[69927]: DEBUG oslo_vmware.api [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096901, 'name': PowerOnVM_Task, 'duration_secs': 0.86201} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.781811] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1259.782198] env[69927]: INFO nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Took 10.48 seconds to spawn the instance on the hypervisor. 
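The PowerOnVM_Task entries above follow oslo.vmware's usual invoke-then-poll pattern: the driver issues the SOAP call, gets back a task reference, and wait_for_task()/_poll_task() loop until vCenter reports completion (the "progress is 0%/66%" and "completed successfully" lines). The following is a minimal sketch of that pattern, assuming an oslo.vmware VMwareAPISession; the host name, credentials, retry count and poll interval are illustrative placeholders, not values taken from this log.

# Illustrative only: the invoke-then-poll pattern behind the PowerOnVM_Task
# lines above. Requires a reachable vCenter; nothing here comes from the log
# except the method names.
from oslo_vmware import api as vmware_api


def power_on_and_wait(session, vm_ref):
    """Issue PowerOnVM_Task for vm_ref and block until it finishes.

    invoke_api() performs the SOAP call ("Invoking
    VirtualMachine.PowerOnVM_Task" above); wait_for_task() polls the returned
    task object until vCenter reports success ("progress is 66%",
    "completed successfully") or raises if the task ends in error.
    """
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task_ref)


# Example wiring (assumed constructor arguments -- verify against your
# oslo.vmware version before relying on them):
#   session = vmware_api.VMwareAPISession(
#       'vcenter.example.org', 'admin', 'secret',
#       api_retry_count=10, task_poll_interval=0.5)
#   power_on_and_wait(session, vm_ref)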
[ 1259.782277] env[69927]: DEBUG nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1259.783205] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ed1cf1-f245-487b-a49c-14db42c82444 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.812898] env[69927]: DEBUG nova.compute.manager [req-da503479-bb67-467f-90b8-fb313f663962 req-d9851447-2086-4c30-8702-b40f60383d13 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Received event network-vif-deleted-f6b836b5-0070-4fdc-8398-6bd6efe3e550 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1259.813226] env[69927]: INFO nova.compute.manager [req-da503479-bb67-467f-90b8-fb313f663962 req-d9851447-2086-4c30-8702-b40f60383d13 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Neutron deleted interface f6b836b5-0070-4fdc-8398-6bd6efe3e550; detaching it from the instance and deleting it from the info cache [ 1259.813678] env[69927]: DEBUG nova.network.neutron [req-da503479-bb67-467f-90b8-fb313f663962 req-d9851447-2086-4c30-8702-b40f60383d13 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.821971] env[69927]: DEBUG oslo_vmware.api [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096902, 'name': PowerOnVM_Task, 'duration_secs': 0.890541} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.822412] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1259.822734] env[69927]: DEBUG nova.compute.manager [None req-2f8cd047-cfb6-4d5f-905b-e5357c471e5b tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1259.823811] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617ab095-a3a8-4529-9584-8fba99ece5f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.944315] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096906, 'name': ReconfigVM_Task, 'duration_secs': 0.441075} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.944762] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19/923b70fc-9959-48cf-8a9f-f8cd7c0c6b19.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1259.945571] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65972690-5763-451d-a323-509e784374a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.953589] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1259.953589] env[69927]: value = "task-4096907" [ 1259.953589] env[69927]: _type = "Task" [ 1259.953589] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.962422] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096907, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.075158] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1260.075487] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60bf31ae-13db-4934-af8b-d390f3876599 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.083736] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1260.083736] env[69927]: value = "task-4096908" [ 1260.083736] env[69927]: _type = "Task" [ 1260.083736] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.096110] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096908, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.289557] env[69927]: DEBUG nova.network.neutron [-] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.309331] env[69927]: INFO nova.compute.manager [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Took 17.07 seconds to build instance. [ 1260.317212] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6b61d51-c10d-4f4f-99a3-18060851896d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.331407] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e82c70-6c49-4ee8-9ca6-23d5d822332b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.377822] env[69927]: DEBUG nova.compute.manager [req-da503479-bb67-467f-90b8-fb313f663962 req-d9851447-2086-4c30-8702-b40f60383d13 service nova] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Detach interface failed, port_id=f6b836b5-0070-4fdc-8398-6bd6efe3e550, reason: Instance 853d85d4-e98f-4810-a8db-b2a820ebc071 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1260.467250] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096907, 'name': Rename_Task, 'duration_secs': 0.15884} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.467761] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1260.468132] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15a5489e-5244-4303-8a4a-14625ecc4a11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.477099] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1260.477099] env[69927]: value = "task-4096909" [ 1260.477099] env[69927]: _type = "Task" [ 1260.477099] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.491803] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096909, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.596983] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096908, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.676427] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.676427] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_power_states {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.793184] env[69927]: INFO nova.compute.manager [-] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Took 1.28 seconds to deallocate network for instance. [ 1260.813464] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5546fa18-e605-404a-a3b2-8156192f86b0 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.581s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.988134] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096909, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.077485] env[69927]: INFO nova.compute.manager [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Rescuing [ 1261.077857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.078108] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquired lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.078367] env[69927]: DEBUG nova.network.neutron [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.098393] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096908, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.180683] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Getting list of instances from cluster (obj){ [ 1261.180683] env[69927]: value = "domain-c8" [ 1261.180683] env[69927]: _type = "ClusterComputeResource" [ 1261.180683] env[69927]: } {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1261.181784] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925b067e-7b83-44a9-9f04-9e07782d358c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.205127] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Got total of 12 instances {{(pid=69927) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1261.205323] env[69927]: WARNING nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] While synchronizing instance power states, found 13 instances in the database and 12 instances on the hypervisor. 
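Once the periodic tasks fire, _sync_power_states compares the database view of instances with what the driver reports and then triggers a per-UUID sync, which is where the "found 13 instances in the database and 12 instances on the hypervisor" warning and the "Triggering sync for uuid ..." lines that follow come from. A rough, self-contained sketch of that comparison (not Nova's actual implementation; the names and signatures here are illustrative):

# Illustrative sketch of the DB-versus-hypervisor count check visible above.
import logging

LOG = logging.getLogger(__name__)


def sync_power_states(db_instances, hypervisor_uuids):
    """db_instances: iterable of objects with a .uuid attribute (DB view).
    hypervisor_uuids: set of VM UUIDs reported by the driver (vCenter view)."""
    db_uuids = [inst.uuid for inst in db_instances]
    if len(db_uuids) != len(hypervisor_uuids):
        LOG.warning('While synchronizing instance power states, found %d '
                    'instances in the database and %d instances on the '
                    'hypervisor.', len(db_uuids), len(hypervisor_uuids))
    for uuid in db_uuids:
        # In the log this appears as one "Triggering sync for uuid ..." line
        # per instance, each then guarded by its own per-instance lock.
        LOG.debug('Triggering sync for uuid %s', uuid)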
[ 1261.205459] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.205699] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 15c44d86-829f-4317-ab66-9e61d4fb4dd0 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.205919] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid cff307ed-3c8b-4126-9749-1204597cbf6c {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.206145] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.206308] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid da468d11-82a4-4fec-b06a-1b522bacdbc2 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.206465] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.206618] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid b007a697-7da4-4c97-9ccb-046d86b27568 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.206768] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid afdd23d0-c8e0-4d49-a188-525b6b3f31c8 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.206914] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.207075] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 659e2584-88a8-4382-98c8-f50fcab78e0c {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.207228] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 853d85d4-e98f-4810-a8db-b2a820ebc071 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.207449] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.207581] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Triggering sync for uuid 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19 {{(pid=69927) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1261.207922] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.208162] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.208440] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.208626] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.208875] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "cff307ed-3c8b-4126-9749-1204597cbf6c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.209086] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.209331] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.209545] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.209777] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.209951] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.210196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.210381] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.210547] env[69927]: INFO nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] During sync_power_state the instance has a pending task (resize_finish). Skip. [ 1261.210711] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.210899] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.211084] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.211317] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.211510] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.211723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.211934] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.212194] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "659e2584-88a8-4382-98c8-f50fcab78e0c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.212371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.212723] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "853d85d4-e98f-4810-a8db-b2a820ebc071" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.212946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.213139] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.213301] env[69927]: INFO nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] During sync_power_state the instance has a pending task (rescuing). Skip. 
[ 1261.213457] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.213639] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.214524] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b540d6-f3c1-4873-8155-afb6f6c9ac74 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.217620] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e730f0c7-45dc-46a6-9b43-5ba53d68acad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.220644] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cf1dff-b1f7-4b51-acff-eda2314e8f97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.223412] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f136d45-374d-4887-a68f-8e4496b13fcb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.226096] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0285c9-3cbf-4637-bdc7-4280e59d7549 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.228663] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f20855b-2e07-4d50-b698-a10e456d688e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.233719] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9123be85-778d-40e3-b8a2-0e0b41e960a4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.236773] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdd087f-c41d-45ee-958b-d48c794c1b27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.239970] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999eabda-c948-47e8-ab4c-31a163f07826 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.302091] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" 
{{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.302389] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.303456] env[69927]: DEBUG nova.objects.instance [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lazy-loading 'resources' on Instance uuid 853d85d4-e98f-4810-a8db-b2a820ebc071 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1261.489327] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096909, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.597016] env[69927]: DEBUG oslo_vmware.api [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096908, 'name': PowerOnVM_Task, 'duration_secs': 1.064417} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.597444] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1261.597772] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-232e9ac8-c1b5-407f-b84d-82441c8a1676 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance '8b70b479-4a54-4bcb-813d-16cc0c9a67c5' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1261.758596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.550s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.765849] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.769671] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.560s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.770307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.560s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.773711] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.565s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.777169] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.566s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.778594] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.567s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.782325] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.570s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.783185] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.572s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.837787] env[69927]: DEBUG nova.compute.manager [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Received event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1261.838331] env[69927]: DEBUG nova.compute.manager [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing instance network info cache due to event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1261.838998] env[69927]: DEBUG oslo_concurrency.lockutils [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] Acquiring lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.838998] env[69927]: DEBUG oslo_concurrency.lockutils [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] Acquired lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.838998] env[69927]: DEBUG nova.network.neutron [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1261.869253] env[69927]: DEBUG nova.network.neutron [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.989125] env[69927]: DEBUG oslo_vmware.api [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096909, 'name': PowerOnVM_Task, 'duration_secs': 1.047362} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.991983] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1261.992228] env[69927]: INFO nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Took 8.55 seconds to spawn the instance on the hypervisor. [ 1261.992407] env[69927]: DEBUG nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1261.993425] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286b9c4f-4bf7-4042-a642-cf4e0dd653e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.017332] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db5568d-10f7-45fe-9107-e664dd004e1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.026561] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d52408f-0a9c-41f2-9756-7174a3a4dff3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.061131] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbfecfa-612f-4bb1-a4e3-f8256ad1e5af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.070638] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5f0650-02e7-42b3-baf4-fc8ffab99de6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.086801] env[69927]: DEBUG nova.compute.provider_tree [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1262.372420] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Releasing lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.517323] env[69927]: INFO nova.compute.manager [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Took 17.14 seconds to build instance. [ 1262.593437] env[69927]: DEBUG nova.network.neutron [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updated VIF entry in instance network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1262.593603] env[69927]: DEBUG nova.network.neutron [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.611156] env[69927]: ERROR nova.scheduler.client.report [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [req-fededfe3-fd23-4f3d-94e7-fce797ba9e0f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2f529b36-df5f-4b37-8103-68f74f737726. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fededfe3-fd23-4f3d-94e7-fce797ba9e0f"}]} [ 1262.628501] env[69927]: DEBUG nova.scheduler.client.report [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1262.651499] env[69927]: DEBUG nova.scheduler.client.report [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1262.651801] env[69927]: DEBUG nova.compute.provider_tree [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1262.665193] env[69927]: DEBUG nova.scheduler.client.report [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1262.685178] env[69927]: DEBUG nova.scheduler.client.report [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1262.868345] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6825d226-f4eb-4ad9-bf09-d2c41681046a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.877928] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4c02f0ef-1c10-40a0-9613-ca09a6a788e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.916424] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6897ee6a-3615-4529-a9d0-dbc03621a894 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.926900] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cf358d-9a98-4e0b-8c25-f9e4a56b463a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.944188] env[69927]: DEBUG nova.compute.provider_tree [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1263.022050] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dd39d6ef-3fba-4d69-9cde-5fd2d211f0e5 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.654s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.022508] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.809s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.022697] env[69927]: INFO nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1263.022867] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.041182] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.099047] env[69927]: DEBUG oslo_concurrency.lockutils [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] Releasing lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.099047] env[69927]: DEBUG nova.compute.manager [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Received event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1263.099047] env[69927]: DEBUG nova.compute.manager [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing instance network info cache due to event network-changed-037faf17-cf20-417c-ab4d-b0a08944b7d9. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1263.099047] env[69927]: DEBUG oslo_concurrency.lockutils [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] Acquiring lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.099047] env[69927]: DEBUG oslo_concurrency.lockutils [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] Acquired lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.099047] env[69927]: DEBUG nova.network.neutron [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Refreshing network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1263.224301] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.224453] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" acquired by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.476636] env[69927]: DEBUG nova.scheduler.client.report [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1263.476944] env[69927]: DEBUG nova.compute.provider_tree [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 166 to 167 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1263.477160] env[69927]: DEBUG nova.compute.provider_tree [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1263.507410] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.507646] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.586962] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.587255] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.587533] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.587726] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.587904] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.592232] env[69927]: INFO nova.compute.manager [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Terminating instance [ 1263.623205] env[69927]: DEBUG nova.network.neutron [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Port 3e85edce-fa8a-45d4-b109-5bdd98a06303 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1263.623494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.623666] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.623916] env[69927]: DEBUG nova.network.neutron [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1263.728367] env[69927]: DEBUG nova.compute.utils [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 1263.818609] env[69927]: DEBUG nova.network.neutron [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updated VIF entry in instance network info cache for port 037faf17-cf20-417c-ab4d-b0a08944b7d9. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1263.819016] env[69927]: DEBUG nova.network.neutron [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [{"id": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "address": "fa:16:3e:10:4b:39", "network": {"id": "527076e8-f800-4686-883d-e70629d8ba0d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-704161827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9e293e2d1a4e179f01f60e882851b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037faf17-cf", "ovs_interfaceid": "037faf17-cf20-417c-ab4d-b0a08944b7d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.918531] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1263.918861] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f51c9ff-e768-4c73-a7c5-44648535ee56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.926523] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1263.926523] env[69927]: value = "task-4096910" [ 1263.926523] env[69927]: _type = "Task" [ 1263.926523] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.936304] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096910, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.982330] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.680s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.006392] env[69927]: INFO nova.scheduler.client.report [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleted allocations for instance 853d85d4-e98f-4810-a8db-b2a820ebc071 [ 1264.095627] env[69927]: DEBUG nova.compute.manager [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1264.095887] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.096857] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79d8f42-bf60-4357-b3b9-f763481f52f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.105484] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.105783] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1126077-669d-4334-82ad-3ea620bf5718 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.113312] env[69927]: DEBUG oslo_vmware.api [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1264.113312] env[69927]: value = "task-4096911" [ 1264.113312] env[69927]: _type = "Task" [ 1264.113312] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.124089] env[69927]: DEBUG oslo_vmware.api [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096911, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.231541] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.321700] env[69927]: DEBUG oslo_concurrency.lockutils [req-ffac629c-f4e4-4126-9998-7afa074db1da req-7021f0e5-dc76-4bf1-b3e4-9c8a40a01430 service nova] Releasing lock "refresh_cache-b007a697-7da4-4c97-9ccb-046d86b27568" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1264.438196] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096910, 'name': PowerOffVM_Task, 'duration_secs': 0.449113} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.438501] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1264.439367] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35f4edf-ab22-45dd-bcdf-57ddedc7f3cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.459165] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7d6008-7c73-4935-912d-503cfd693b86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.462911] env[69927]: DEBUG nova.network.neutron [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [{"id": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "address": "fa:16:3e:df:fd:25", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap3e85edce-fa", "ovs_interfaceid": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.498559] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.498941] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82692473-b885-43cf-80d0-b646f1f91ae4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.507711] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.508477] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1264.508477] env[69927]: value = "task-4096912" [ 1264.508477] env[69927]: _type = "Task" [ 1264.508477] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.515616] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b37c52ec-abb9-4588-9c98-2f343053283c tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.660s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.516735] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.304s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.517050] env[69927]: INFO nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1264.517286] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "853d85d4-e98f-4810-a8db-b2a820ebc071" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.522671] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1264.522893] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1264.523160] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.523310] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.523491] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1264.523735] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6eb5347-87ac-4fd7-8eda-bda3a84b0b99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.533487] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1264.533699] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1264.535023] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1017c08-c9a5-494c-a003-231d8d86c7f7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.540703] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1264.540703] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524e913b-0ce0-60cb-ded7-f566d3f8e332" [ 1264.540703] env[69927]: _type = "Task" [ 1264.540703] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.548383] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524e913b-0ce0-60cb-ded7-f566d3f8e332, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.623324] env[69927]: DEBUG oslo_vmware.api [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096911, 'name': PowerOffVM_Task, 'duration_secs': 0.268723} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.623590] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1264.623761] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1264.624017] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bf95152-f88f-48a4-b5d6-8205eff2274f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.705405] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1264.705675] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1264.705892] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-98b649c8-697b-4f85-8cca-a612014a50d0 
tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleting the datastore file [datastore1] 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1264.706183] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac09d544-7454-429f-b5b4-e7e6820522f1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.713587] env[69927]: DEBUG oslo_vmware.api [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1264.713587] env[69927]: value = "task-4096914" [ 1264.713587] env[69927]: _type = "Task" [ 1264.713587] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.723913] env[69927]: DEBUG oslo_vmware.api [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096914, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.965821] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.052045] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524e913b-0ce0-60cb-ded7-f566d3f8e332, 'name': SearchDatastore_Task, 'duration_secs': 0.009113} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.052869] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f57080ce-5288-43fc-9de1-1dd4dd3e783e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.058917] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1265.058917] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520b8a33-6bb3-9a59-4e3f-34d8cfacb46c" [ 1265.058917] env[69927]: _type = "Task" [ 1265.058917] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.067815] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520b8a33-6bb3-9a59-4e3f-34d8cfacb46c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.223663] env[69927]: DEBUG oslo_vmware.api [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096914, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277293} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.223977] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.224183] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.224362] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.224532] env[69927]: INFO nova.compute.manager [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1265.224771] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1265.224984] env[69927]: DEBUG nova.compute.manager [-] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1265.225122] env[69927]: DEBUG nova.network.neutron [-] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1265.294207] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.294453] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.294695] env[69927]: INFO nova.compute.manager [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Attaching volume 431d9973-8315-4693-869d-e24019eb9d16 to /dev/sdb [ 1265.329230] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a866f3ad-d751-4a9c-90ea-07153a5d7876 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.336607] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e297e26-9e04-40aa-b9cd-73b55885a3c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.351356] env[69927]: DEBUG nova.virt.block_device [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updating existing volume attachment record: 501b5c72-2b5b-43c0-b8d6-8b14779f0026 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1265.470381] env[69927]: DEBUG nova.compute.manager [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69927) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:900}} [ 1265.507966] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.507966] env[69927]: DEBUG nova.compute.manager [None 
req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1265.570917] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520b8a33-6bb3-9a59-4e3f-34d8cfacb46c, 'name': SearchDatastore_Task, 'duration_secs': 0.011518} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.571208] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.571466] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. {{(pid=69927) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1265.572242] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22ba2f55-340e-4541-a5e6-0f2f67ca0298 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.581310] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1265.581310] env[69927]: value = "task-4096916" [ 1265.581310] env[69927]: _type = "Task" [ 1265.581310] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.592175] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096916, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.601123] env[69927]: DEBUG nova.compute.manager [req-9b93bd7c-1b30-4240-b600-e42fea3746e5 req-9f2d30c4-6ce8-4b31-a466-b153935b1478 service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Received event network-vif-deleted-1e9afa83-f755-4605-8818-5470b4d2bd50 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1265.601123] env[69927]: INFO nova.compute.manager [req-9b93bd7c-1b30-4240-b600-e42fea3746e5 req-9f2d30c4-6ce8-4b31-a466-b153935b1478 service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Neutron deleted interface 1e9afa83-f755-4605-8818-5470b4d2bd50; detaching it from the instance and deleting it from the info cache [ 1265.601446] env[69927]: DEBUG nova.network.neutron [req-9b93bd7c-1b30-4240-b600-e42fea3746e5 req-9f2d30c4-6ce8-4b31-a466-b153935b1478 service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.697954] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "659e2584-88a8-4382-98c8-f50fcab78e0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.698257] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.698498] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "659e2584-88a8-4382-98c8-f50fcab78e0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.698736] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.698940] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.701557] env[69927]: INFO nova.compute.manager [None 
req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Terminating instance [ 1266.087065] env[69927]: DEBUG nova.network.neutron [-] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.093036] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096916, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483778} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.093852] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk. [ 1266.094659] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c553b5-9877-4c25-b972-80681a2418ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.121869] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1266.122203] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a0f3e74-62e6-4c85-b85d-b34751e86664 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.125047] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fce3758-5491-4952-97a7-c4af57278439 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.146807] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4a335a-cb00-4ac9-82c9-565421b5fd9c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.159874] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1266.159874] env[69927]: value = "task-4096917" [ 1266.159874] env[69927]: _type = "Task" [ 1266.159874] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.175988] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096917, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.188160] env[69927]: DEBUG nova.compute.manager [req-9b93bd7c-1b30-4240-b600-e42fea3746e5 req-9f2d30c4-6ce8-4b31-a466-b153935b1478 service nova] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Detach interface failed, port_id=1e9afa83-f755-4605-8818-5470b4d2bd50, reason: Instance 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1266.205854] env[69927]: DEBUG nova.compute.manager [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1266.206269] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1266.207155] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54b3a74-7272-4bfd-90ce-91bc908fefa6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.215535] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1266.215806] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bca8b641-70ae-48f4-925d-354aa8725fdb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.223897] env[69927]: DEBUG oslo_vmware.api [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1266.223897] env[69927]: value = "task-4096918" [ 1266.223897] env[69927]: _type = "Task" [ 1266.223897] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.238071] env[69927]: DEBUG oslo_vmware.api [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096918, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.507668] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.581986] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.582629] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.589421] env[69927]: INFO nova.compute.manager [-] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Took 1.36 seconds to deallocate network for instance. [ 1266.670285] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096917, 'name': ReconfigVM_Task, 'duration_secs': 0.349594} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.670577] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11/f524494e-9179-4b3e-a3e2-782f019def24-rescue.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1266.671478] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc65419-7635-467e-9b30-ed401461758e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.696703] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edec500e-60c7-40bc-b4a6-a214a53f37cb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.714192] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1266.714192] env[69927]: value = "task-4096919" [ 1266.714192] env[69927]: _type = "Task" [ 1266.714192] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.728061] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096919, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.741028] env[69927]: DEBUG oslo_vmware.api [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096918, 'name': PowerOffVM_Task, 'duration_secs': 0.292126} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.741495] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1266.741827] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1266.742237] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23eb9e01-4a0b-45ea-865f-b9b53f8d0b3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.825766] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1266.825965] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1266.826134] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleting the datastore file [datastore1] 659e2584-88a8-4382-98c8-f50fcab78e0c {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1266.826406] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b22935db-245f-4458-90aa-fc84b307ec49 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.834518] env[69927]: DEBUG oslo_vmware.api [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1266.834518] env[69927]: value = 
"task-4096921" [ 1266.834518] env[69927]: _type = "Task" [ 1266.834518] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.843788] env[69927]: DEBUG oslo_vmware.api [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.011296] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.087301] env[69927]: DEBUG nova.objects.instance [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'migration_context' on Instance uuid 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1267.095865] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.224412] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096919, 'name': ReconfigVM_Task, 'duration_secs': 0.159035} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.224654] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1267.224919] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9966159-c110-4480-b06e-88442f47ca9e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.232488] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1267.232488] env[69927]: value = "task-4096922" [ 1267.232488] env[69927]: _type = "Task" [ 1267.232488] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.240642] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096922, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.345202] env[69927]: DEBUG oslo_vmware.api [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159611} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.345434] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1267.345602] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1267.345864] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1267.346055] env[69927]: INFO nova.compute.manager [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1267.346313] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1267.346511] env[69927]: DEBUG nova.compute.manager [-] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1267.346608] env[69927]: DEBUG nova.network.neutron [-] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1267.743305] env[69927]: DEBUG oslo_vmware.api [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096922, 'name': PowerOnVM_Task, 'duration_secs': 0.442213} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.745966] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1267.749660] env[69927]: DEBUG nova.compute.manager [None req-fac8559a-d073-4bcc-aa1e-3f53e31b0607 tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1267.750523] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b568a5-0f74-453b-baa8-8ab4a9b5bab7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.774878] env[69927]: DEBUG nova.compute.manager [req-43d53588-fce1-4b26-826a-4525cb0721b7 req-00af2529-2218-4a8c-9e92-9dab160639f2 service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Received event network-vif-deleted-340dd818-a9e9-44ba-8a3b-a6bf475270e5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1267.775085] env[69927]: INFO nova.compute.manager [req-43d53588-fce1-4b26-826a-4525cb0721b7 req-00af2529-2218-4a8c-9e92-9dab160639f2 service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Neutron deleted interface 340dd818-a9e9-44ba-8a3b-a6bf475270e5; detaching it from the instance and deleting it from the info cache [ 1267.775254] env[69927]: DEBUG nova.network.neutron [req-43d53588-fce1-4b26-826a-4525cb0721b7 req-00af2529-2218-4a8c-9e92-9dab160639f2 service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.824707] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00e7f1a-e239-4a9b-9b45-c1cd0f5377e9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.833359] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2e2c07-27f5-47ce-a8e2-c2b3e8895058 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.865518] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc67f57-d4fb-4152-ba6f-55a8ab46dd22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.873902] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2febc20-c891-4393-9056-daac978bd46f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.887719] env[69927]: DEBUG nova.compute.provider_tree [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1268.185841] env[69927]: DEBUG nova.network.neutron [-] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.279670] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bba52359-3680-4089-a85f-2f2d9481f0ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.289937] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33292a4-d960-4da3-b8ae-1fc0cac57f88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.322933] env[69927]: DEBUG nova.compute.manager [req-43d53588-fce1-4b26-826a-4525cb0721b7 req-00af2529-2218-4a8c-9e92-9dab160639f2 service nova] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Detach interface failed, port_id=340dd818-a9e9-44ba-8a3b-a6bf475270e5, reason: Instance 659e2584-88a8-4382-98c8-f50fcab78e0c could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1268.421570] env[69927]: DEBUG nova.scheduler.client.report [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Updated inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 with generation 167 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1268.422678] env[69927]: DEBUG nova.compute.provider_tree [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Updating resource provider 2f529b36-df5f-4b37-8103-68f74f737726 generation from 167 to 168 during operation: update_inventory {{(pid=69927) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1268.422905] env[69927]: DEBUG nova.compute.provider_tree [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1268.688525] env[69927]: INFO nova.compute.manager [-] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Took 1.34 seconds to deallocate network for instance. [ 1269.196743] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.435755] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.853s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.441352] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.430s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.441531] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.441688] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1269.441985] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.346s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.442198] env[69927]: DEBUG nova.objects.instance [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'resources' on Instance uuid 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.445697] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4494e92-2e59-4fae-b315-9ae4cebbb8ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.458094] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9a0ec0-15be-4e6f-ae9e-f9c391fbbda3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.474556] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-80cf687b-3e54-421a-88ee-bd133699d53d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.481659] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6561cbc7-9e9e-4fd6-a75c-8b5f28ec629e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.513456] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178866MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1269.513654] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.804192] env[69927]: DEBUG nova.compute.manager [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1269.804192] env[69927]: DEBUG nova.compute.manager [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing instance network info cache due to event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1269.804192] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] Acquiring lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.804192] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] Acquired lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.804192] env[69927]: DEBUG nova.network.neutron [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1269.897402] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Volume attach. 
Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1269.897664] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811603', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'name': 'volume-431d9973-8315-4693-869d-e24019eb9d16', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afdd23d0-c8e0-4d49-a188-525b6b3f31c8', 'attached_at': '', 'detached_at': '', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'serial': '431d9973-8315-4693-869d-e24019eb9d16'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1269.898673] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9e90cf-8227-4fd8-875e-6a7caac634c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.918058] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6089c198-d13a-4722-9a4d-e06d48b24ac2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.944830] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] volume-431d9973-8315-4693-869d-e24019eb9d16/volume-431d9973-8315-4693-869d-e24019eb9d16.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1269.945552] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-520464a3-d4d7-4667-bb0b-c8c34792e229 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.970272] env[69927]: DEBUG oslo_vmware.api [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1269.970272] env[69927]: value = "task-4096924" [ 1269.970272] env[69927]: _type = "Task" [ 1269.970272] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.980237] env[69927]: DEBUG oslo_vmware.api [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096924, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.166273] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c476e54-f337-4e21-a0ad-ecc1b4bc2503 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.175013] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a140ccb-4861-482a-8be1-1f3060b1fe22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.995022] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e368e40-2704-49c4-90ff-778890164523 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.005175] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22cbdd5-753f-4454-8e78-09d9abd787ab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.009059] env[69927]: DEBUG oslo_vmware.api [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096924, 'name': ReconfigVM_Task, 'duration_secs': 0.484659} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.009339] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Reconfigured VM instance instance-0000006f to attach disk [datastore1] volume-431d9973-8315-4693-869d-e24019eb9d16/volume-431d9973-8315-4693-869d-e24019eb9d16.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.014383] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2aa70357-08e4-4cf2-8748-32ff3a84c16d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.034193] env[69927]: DEBUG nova.compute.provider_tree [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.042699] env[69927]: DEBUG oslo_vmware.api [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1271.042699] env[69927]: value = "task-4096925" [ 1271.042699] env[69927]: _type = "Task" [ 1271.042699] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.055033] env[69927]: DEBUG oslo_vmware.api [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096925, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.395371] env[69927]: DEBUG nova.network.neutron [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updated VIF entry in instance network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1271.395740] env[69927]: DEBUG nova.network.neutron [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.514194] env[69927]: INFO nova.compute.manager [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Swapping old allocation on dict_keys(['2f529b36-df5f-4b37-8103-68f74f737726']) held by migration c7013a21-99b2-4237-b87d-4f1ebfd104b5 for instance [ 1271.536362] env[69927]: DEBUG nova.scheduler.client.report [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Overwriting current allocation {'allocations': {'2f529b36-df5f-4b37-8103-68f74f737726': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 168}}, 'project_id': '9cf6bb3492c642aa9a168e484299289c', 'user_id': '16c1e562693c466c8786016a777f9f32', 'consumer_generation': 1} on consumer 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 {{(pid=69927) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1271.539044] env[69927]: DEBUG nova.scheduler.client.report [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.555272] env[69927]: DEBUG oslo_vmware.api [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096925, 'name': ReconfigVM_Task, 'duration_secs': 0.267389} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.555611] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811603', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'name': 'volume-431d9973-8315-4693-869d-e24019eb9d16', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afdd23d0-c8e0-4d49-a188-525b6b3f31c8', 'attached_at': '', 'detached_at': '', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'serial': '431d9973-8315-4693-869d-e24019eb9d16'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1271.634429] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.634613] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.634786] env[69927]: DEBUG nova.network.neutron [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1271.836998] env[69927]: DEBUG nova.compute.manager [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1271.837172] env[69927]: DEBUG nova.compute.manager [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing instance network info cache due to event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1271.837363] env[69927]: DEBUG oslo_concurrency.lockutils [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] Acquiring lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.898855] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] Releasing lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.899374] env[69927]: DEBUG nova.compute.manager [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1271.899374] env[69927]: DEBUG nova.compute.manager [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing instance network info cache due to event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1271.899499] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] Acquiring lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.899737] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] Acquired lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.899917] env[69927]: DEBUG nova.network.neutron [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1272.044536] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.602s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.046779] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.850s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.047016] env[69927]: DEBUG nova.objects.instance [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 
tempest-ListServerFiltersTestJSON-532103941-project-member] Lazy-loading 'resources' on Instance uuid 659e2584-88a8-4382-98c8-f50fcab78e0c {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.066502] env[69927]: INFO nova.scheduler.client.report [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted allocations for instance 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19 [ 1272.552052] env[69927]: DEBUG nova.network.neutron [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [{"id": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "address": "fa:16:3e:df:fd:25", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e85edce-fa", "ovs_interfaceid": "3e85edce-fa8a-45d4-b109-5bdd98a06303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.572907] env[69927]: DEBUG oslo_concurrency.lockutils [None req-98b649c8-697b-4f85-8cca-a612014a50d0 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "923b70fc-9959-48cf-8a9f-f8cd7c0c6b19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.985s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.605456] env[69927]: DEBUG nova.objects.instance [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'flavor' on Instance uuid afdd23d0-c8e0-4d49-a188-525b6b3f31c8 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.634234] env[69927]: DEBUG nova.network.neutron [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updated VIF entry in instance network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1272.634234] env[69927]: DEBUG nova.network.neutron [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.752593] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b80466f-783b-49be-bff1-34cb020b295c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.763489] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5d3ce3-d9a7-44e2-a731-915a1bc739c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.798152] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7480b93d-e0af-4df6-8757-58e13a876c98 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.807658] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b1b416-124e-4b25-92e8-7c2fafe6548c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.824249] env[69927]: DEBUG nova.compute.provider_tree [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.059573] env[69927]: DEBUG oslo_concurrency.lockutils [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-8b70b479-4a54-4bcb-813d-16cc0c9a67c5" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.060479] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5f230a6e-14d2-48d1-9a5b-7ba46fe4dc88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.067934] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd1d674-6657-4182-b48b-15e47e9b05d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.116056] env[69927]: DEBUG oslo_concurrency.lockutils [None req-6ef261ec-27ec-4f4a-8de0-03ebd502ea25 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.821s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.137385] env[69927]: DEBUG oslo_concurrency.lockutils [req-0d60e10f-02de-4dfd-858c-3474bfa3185f req-3bfcfdb5-d590-4425-b4e6-77e6a25b1296 service nova] Releasing lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.137857] env[69927]: DEBUG oslo_concurrency.lockutils [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] Acquired lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.138106] env[69927]: DEBUG nova.network.neutron [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1273.263621] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.264034] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.301850] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.302563] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.302563] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.302698] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.302810] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.304981] env[69927]: INFO nova.compute.manager [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Terminating instance [ 1273.315901] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.316113] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.326946] env[69927]: DEBUG nova.scheduler.client.report [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1273.766764] env[69927]: INFO nova.compute.manager [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Detaching volume 431d9973-8315-4693-869d-e24019eb9d16 [ 1273.798556] env[69927]: INFO nova.virt.block_device [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Attempting to driver detach volume 431d9973-8315-4693-869d-e24019eb9d16 from mountpoint /dev/sdb [ 1273.798810] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1273.798996] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811603', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'name': 'volume-431d9973-8315-4693-869d-e24019eb9d16', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afdd23d0-c8e0-4d49-a188-525b6b3f31c8', 'attached_at': '', 'detached_at': '', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'serial': '431d9973-8315-4693-869d-e24019eb9d16'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1273.800014] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a9aeb2-e1a6-4f9a-beb0-4799b21aa32e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.808264] env[69927]: DEBUG nova.compute.manager [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1273.808471] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1273.830091] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2120a052-2887-401f-96d8-f4d3472c12d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.834207] env[69927]: DEBUG nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1273.839079] env[69927]: DEBUG nova.network.neutron [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updated VIF entry in instance network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1273.839609] env[69927]: DEBUG nova.network.neutron [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.841584] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.845850] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1499c09-8bf8-4fb2-993f-f4e4271bd03a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.850427] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.337s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.863139] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1273.863773] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-d03e8ca4-692d-4332-af9b-a54108cd7f78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.866781] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fd9458-7e0c-43ff-9a62-aabf53cc951b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.894217] env[69927]: INFO nova.scheduler.client.report [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleted allocations for instance 659e2584-88a8-4382-98c8-f50fcab78e0c [ 1273.897048] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91871fb0-6457-48ae-b729-552912c81356 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.899919] env[69927]: DEBUG oslo_vmware.api [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1273.899919] env[69927]: value = "task-4096926" [ 1273.899919] env[69927]: _type = "Task" [ 1273.899919] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.919457] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] The volume has not been displaced from its original location: [datastore1] volume-431d9973-8315-4693-869d-e24019eb9d16/volume-431d9973-8315-4693-869d-e24019eb9d16.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1273.924893] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1273.925978] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fe6965d-7331-4c52-ad18-b14e4d1bb3f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.943649] env[69927]: DEBUG oslo_vmware.api [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096926, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.951561] env[69927]: DEBUG oslo_vmware.api [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1273.951561] env[69927]: value = "task-4096927" [ 1273.951561] env[69927]: _type = "Task" [ 1273.951561] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.961347] env[69927]: DEBUG oslo_vmware.api [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096927, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.148745] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1274.149143] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fecc35f5-49b0-42ff-88ba-8d826d82242d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.159466] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1274.159466] env[69927]: value = "task-4096928" [ 1274.159466] env[69927]: _type = "Task" [ 1274.159466] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.168341] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.352896] env[69927]: DEBUG oslo_concurrency.lockutils [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] Releasing lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.353147] env[69927]: DEBUG nova.compute.manager [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1274.353318] env[69927]: DEBUG nova.compute.manager [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing instance network info cache due to event network-changed-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1274.353523] env[69927]: DEBUG oslo_concurrency.lockutils [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] Acquiring lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.353903] env[69927]: DEBUG oslo_concurrency.lockutils [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] Acquired lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.353903] env[69927]: DEBUG nova.network.neutron [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Refreshing network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1274.375496] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.411713] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fe0a7a2d-ba13-4de9-af13-03f2eff415a7 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "659e2584-88a8-4382-98c8-f50fcab78e0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.713s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.417394] env[69927]: DEBUG oslo_vmware.api [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096926, 'name': PowerOffVM_Task, 'duration_secs': 0.205589} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.417710] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1274.417893] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1274.418186] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f34fda4c-255a-4d66-8371-536b5015efe6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.461259] env[69927]: DEBUG oslo_vmware.api [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096927, 'name': ReconfigVM_Task, 'duration_secs': 0.239414} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.461625] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1274.468266] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b0135ba-873c-4bca-845f-2478d8174a04 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.484466] env[69927]: DEBUG oslo_vmware.api [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1274.484466] env[69927]: value = "task-4096930" [ 1274.484466] env[69927]: _type = "Task" [ 1274.484466] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.490797] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1274.491014] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1274.491205] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Deleting the datastore file [datastore2] 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1274.491841] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abc44acd-533f-4024-bf1b-56db3ff9f948 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.496955] env[69927]: DEBUG oslo_vmware.api [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096930, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.502548] env[69927]: DEBUG oslo_vmware.api [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for the task: (returnval){ [ 1274.502548] env[69927]: value = "task-4096931" [ 1274.502548] env[69927]: _type = "Task" [ 1274.502548] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.514052] env[69927]: DEBUG oslo_vmware.api [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096931, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.669719] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096928, 'name': PowerOffVM_Task, 'duration_secs': 0.286289} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.670033] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1274.670733] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1274.670949] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1274.671119] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1274.671318] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1274.671467] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1274.671633] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1274.671844] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1274.672040] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 
tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1274.672226] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1274.672389] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1274.672578] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1274.677755] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34efcb0e-b6fd-466e-beb5-dde3072e5871 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.694600] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1274.694600] env[69927]: value = "task-4096932" [ 1274.694600] env[69927]: _type = "Task" [ 1274.694600] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.704542] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096932, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.803379] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.803871] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.803871] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.804027] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.804214] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.806470] env[69927]: INFO nova.compute.manager [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Terminating instance [ 1274.890670] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.890877] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 15c44d86-829f-4317-ab66-9e61d4fb4dd0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.891065] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance cff307ed-3c8b-4126-9749-1204597cbf6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.891310] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance da468d11-82a4-4fec-b06a-1b522bacdbc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.891423] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b007a697-7da4-4c97-9ccb-046d86b27568 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.891585] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance afdd23d0-c8e0-4d49-a188-525b6b3f31c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.891754] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.891916] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 693a6c6b-8d1c-405e-bb17-73259e28f556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.892058] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.892302] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1274.996865] env[69927]: DEBUG oslo_vmware.api [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096930, 'name': ReconfigVM_Task, 'duration_secs': 0.170229} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.997211] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811603', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'name': 'volume-431d9973-8315-4693-869d-e24019eb9d16', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'afdd23d0-c8e0-4d49-a188-525b6b3f31c8', 'attached_at': '', 'detached_at': '', 'volume_id': '431d9973-8315-4693-869d-e24019eb9d16', 'serial': '431d9973-8315-4693-869d-e24019eb9d16'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1275.016358] env[69927]: DEBUG oslo_vmware.api [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Task: {'id': task-4096931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19741} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.016358] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.016358] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1275.016358] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1275.016358] env[69927]: INFO nova.compute.manager [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 1275.016577] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1275.016620] env[69927]: DEBUG nova.compute.manager [-] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1275.016706] env[69927]: DEBUG nova.network.neutron [-] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1275.074816] env[69927]: DEBUG nova.network.neutron [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updated VIF entry in instance network info cache for port 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1275.075292] env[69927]: DEBUG nova.network.neutron [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [{"id": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "address": "fa:16:3e:8e:7a:45", "network": {"id": "25e3d194-9d78-4e94-abb8-b5f521eb1990", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-666062733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bf75fa4b8ec6436999f00b7cb4b57e24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce221a9-57", "ovs_interfaceid": "1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.206263] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096932, 'name': ReconfigVM_Task, 'duration_secs': 0.200401} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.207239] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3507bce7-c9f4-4930-87a7-f9a3c37aacbd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.230742] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1275.231016] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1275.231234] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1275.231470] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1275.231694] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1275.231908] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1275.232169] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1275.232379] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1275.233279] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1275.233279] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1275.233279] env[69927]: DEBUG nova.virt.hardware [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1275.233951] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a6895ff-3de9-45be-9cbe-62fc68cade12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.240359] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1275.240359] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a92d4b-1ea3-c42f-b369-492c34cf0bcd" [ 1275.240359] env[69927]: _type = "Task" [ 1275.240359] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.249394] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a92d4b-1ea3-c42f-b369-492c34cf0bcd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.296248] env[69927]: DEBUG nova.compute.manager [req-6bbfef10-cc99-4d20-9d43-ab9ea8c48cc9 req-d36c5cfb-4cfc-4979-afaa-8d77306b90b2 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Received event network-vif-deleted-1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1275.296500] env[69927]: INFO nova.compute.manager [req-6bbfef10-cc99-4d20-9d43-ab9ea8c48cc9 req-d36c5cfb-4cfc-4979-afaa-8d77306b90b2 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Neutron deleted interface 1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2; detaching it from the instance and deleting it from the info cache [ 1275.296686] env[69927]: DEBUG nova.network.neutron [req-6bbfef10-cc99-4d20-9d43-ab9ea8c48cc9 req-d36c5cfb-4cfc-4979-afaa-8d77306b90b2 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.309827] env[69927]: DEBUG nova.compute.manager [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1275.310048] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1275.311138] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfecd984-9aab-485c-8470-f3fc5242f7b1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.319649] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1275.319897] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fce58be-6ba3-420a-ba0c-82e7aa82bd1d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.326878] env[69927]: DEBUG oslo_vmware.api [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1275.326878] env[69927]: value = "task-4096933" [ 1275.326878] env[69927]: _type = "Task" [ 1275.326878] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.335689] env[69927]: DEBUG oslo_vmware.api [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096933, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.395645] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance eb84d52d-7153-412b-9ed9-4b7986cdfbbf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1275.395949] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1275.396109] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1275.548888] env[69927]: DEBUG nova.objects.instance [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'flavor' on Instance uuid afdd23d0-c8e0-4d49-a188-525b6b3f31c8 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.559180] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5111fa-2ad0-4e7c-b2be-a8b35bc07025 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.569106] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e5a6d8-ff47-46fc-81ea-313ceabe7da1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.599476] env[69927]: DEBUG oslo_concurrency.lockutils [req-190d83f3-9080-4a4b-8118-585177874b3d req-61c8bfd9-425e-459b-af81-c28421e107fe service nova] Releasing lock "refresh_cache-8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.601165] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e2f45a-d244-40d5-9fc0-c7779ee05ad4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.609396] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b422c619-7d01-4fd4-9b5f-3fe1b958645c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.623188] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1275.750819] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': 
session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a92d4b-1ea3-c42f-b369-492c34cf0bcd, 'name': SearchDatastore_Task, 'duration_secs': 0.007889} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.756229] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1275.756515] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e057fe32-ba2b-474a-8aa2-b7551e1b0ae7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.775467] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1275.775467] env[69927]: value = "task-4096934" [ 1275.775467] env[69927]: _type = "Task" [ 1275.775467] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.780427] env[69927]: DEBUG nova.network.neutron [-] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.786734] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096934, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.799637] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d757e2c-1347-472c-ba06-adc863b259e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.810146] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1390570-b922-4bcc-9e3b-3cb1fbb72012 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.841780] env[69927]: DEBUG nova.compute.manager [req-6bbfef10-cc99-4d20-9d43-ab9ea8c48cc9 req-d36c5cfb-4cfc-4979-afaa-8d77306b90b2 service nova] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Detach interface failed, port_id=1ce221a9-5745-4efe-a99d-f2bcc0d4e7f2, reason: Instance 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1275.845181] env[69927]: DEBUG oslo_vmware.api [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096933, 'name': PowerOffVM_Task, 'duration_secs': 0.198631} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.845435] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1275.845602] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1275.845859] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f69a6b9-f569-41eb-975d-75322221c101 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.918328] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1275.918328] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1275.918513] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleting the datastore file [datastore2] 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1275.918776] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ce325ac-6551-4f9d-b79a-50e04e169118 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.926416] env[69927]: DEBUG oslo_vmware.api [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for the task: (returnval){ [ 1275.926416] env[69927]: value = "task-4096936" [ 1275.926416] env[69927]: _type = "Task" [ 1275.926416] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.936415] env[69927]: DEBUG oslo_vmware.api [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096936, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.126129] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1276.286174] env[69927]: INFO nova.compute.manager [-] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Took 1.27 seconds to deallocate network for instance. [ 1276.286577] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096934, 'name': ReconfigVM_Task, 'duration_secs': 0.283576} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.288239] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1276.291594] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89058c99-fc10-4f83-ad5d-1a0d71bd6044 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.317136] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1276.317456] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26c2b2a1-c5a1-485d-90e7-7a975b2f368e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.335966] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1276.335966] env[69927]: value = "task-4096937" [ 1276.335966] env[69927]: _type = "Task" [ 1276.335966] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.344709] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096937, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.438379] env[69927]: DEBUG oslo_vmware.api [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Task: {'id': task-4096936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177339} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.438660] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1276.438820] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1276.439024] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1276.439211] env[69927]: INFO nova.compute.manager [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1276.439454] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1276.439663] env[69927]: DEBUG nova.compute.manager [-] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1276.439759] env[69927]: DEBUG nova.network.neutron [-] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1276.556455] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1eb86a49-b060-4793-b0a5-c2e143e256b2 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.292s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.631212] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1276.631456] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.781s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.631668] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.256s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.633423] env[69927]: INFO nova.compute.claims [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1276.796170] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.850198] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096937, 'name': ReconfigVM_Task, 'duration_secs': 0.303854} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.850666] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5/8b70b479-4a54-4bcb-813d-16cc0c9a67c5.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1276.851959] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2458bb17-9004-4581-8b14-1184fc2ac830 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.880030] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273227c9-f209-48cb-a3d4-4a48ad6e09c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.901859] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad22517-7c28-4385-9418-6bc6fb86aced {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.922753] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd27522a-cc4b-413a-a42e-a339ac7607be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.934632] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1276.934632] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-943da160-638e-4305-b128-106437edd8c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.944889] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1276.944889] env[69927]: value = "task-4096938" [ 1276.944889] env[69927]: _type = "Task" [ 1276.944889] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.954840] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096938, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.193492] env[69927]: DEBUG nova.network.neutron [-] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.326283] env[69927]: DEBUG nova.compute.manager [req-6983b499-5543-49fb-bac6-c9437e9b19fe req-d1b1a6cb-dbad-4d21-aa80-7d8d83d2d121 service nova] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Received event network-vif-deleted-571ec5f9-628f-4a79-8f19-13c41eb94377 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1277.455382] env[69927]: DEBUG oslo_vmware.api [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096938, 'name': PowerOnVM_Task, 'duration_secs': 0.396045} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.455650] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1277.576639] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.576968] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.577229] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.577442] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.577628] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock 
"afdd23d0-c8e0-4d49-a188-525b6b3f31c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.580315] env[69927]: INFO nova.compute.manager [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Terminating instance [ 1277.696335] env[69927]: INFO nova.compute.manager [-] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Took 1.26 seconds to deallocate network for instance. [ 1277.792163] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe3d6c1-8e84-46f8-98e0-4ea7b8e2286a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.800275] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7379a1aa-ee8c-4d28-a6cb-300b74f1b223 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.833413] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10297f0-124b-4596-a754-21d98ac70ba2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.841439] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e91177-47a4-44d8-885b-212bba599a9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.857078] env[69927]: DEBUG nova.compute.provider_tree [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.084480] env[69927]: DEBUG nova.compute.manager [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1278.084721] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1278.085602] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd89762-98a4-4ec9-9ff8-b95ed607df73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.095261] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1278.095570] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81b94c49-c7cb-4ef9-93c1-459aabe598e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.102729] env[69927]: DEBUG oslo_vmware.api [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1278.102729] env[69927]: value = "task-4096939" [ 1278.102729] env[69927]: _type = "Task" [ 1278.102729] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.111069] env[69927]: DEBUG oslo_vmware.api [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096939, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.206477] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1278.359853] env[69927]: DEBUG nova.scheduler.client.report [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1278.497845] env[69927]: INFO nova.compute.manager [None req-30986daa-6f40-4b20-bee1-2cf998bb384f tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance to original state: 'active' [ 1278.613501] env[69927]: DEBUG oslo_vmware.api [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096939, 'name': PowerOffVM_Task, 'duration_secs': 0.22401} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.613722] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1278.613910] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1278.614235] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35eeb0aa-d331-415c-8c84-bb906b23a846 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.635727] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1278.686325] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1278.686623] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1278.686816] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleting the datastore file [datastore2] afdd23d0-c8e0-4d49-a188-525b6b3f31c8 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1278.687087] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef3e7774-03aa-4bf7-92f7-1e8ebdd51ce1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.694426] env[69927]: DEBUG oslo_vmware.api [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for the task: (returnval){ [ 1278.694426] env[69927]: value = "task-4096941" [ 1278.694426] env[69927]: _type = "Task" [ 1278.694426] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.703076] env[69927]: DEBUG oslo_vmware.api [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096941, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.865842] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.866338] env[69927]: DEBUG nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1278.868994] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.073s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.869227] env[69927]: DEBUG nova.objects.instance [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lazy-loading 'resources' on Instance uuid 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1279.204868] env[69927]: DEBUG oslo_vmware.api [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Task: {'id': task-4096941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147296} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.205166] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1279.205356] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1279.205531] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1279.205708] env[69927]: INFO nova.compute.manager [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Took 1.12 seconds to destroy the instance on the hypervisor. 
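The lockutils lines in this span report how long a caller waited to acquire a named lock and how long it was held (e.g. "compute_resources" acquired after waiting 2.256s, released after being held 2.234s). The real mechanism is oslo_concurrency.lockutils, as the file paths show; the context manager below is only an illustrative approximation of that waited/held timing, with hypothetical names.

import threading
import time

_named_locks = {}

class timed_lock:
    """Approximate the named-lock timing seen in the lockutils log lines."""

    def __init__(self, name):
        self.name = name
        self._lock = _named_locks.setdefault(name, threading.Lock())

    def __enter__(self):
        t0 = time.monotonic()
        self._lock.acquire()
        self._acquired = time.monotonic()
        print('Lock "%s" acquired :: waited %.3fs' % (self.name, self._acquired - t0))
        return self

    def __exit__(self, exc_type, exc, tb):
        held = time.monotonic() - self._acquired
        self._lock.release()
        print('Lock "%s" released :: held %.3fs' % (self.name, held))

# usage sketch:
# with timed_lock("compute_resources"):
#     ...update the tracker's view of host resources...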
[ 1279.205966] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1279.206184] env[69927]: DEBUG nova.compute.manager [-] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1279.206273] env[69927]: DEBUG nova.network.neutron [-] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1279.372544] env[69927]: DEBUG nova.compute.utils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1279.374384] env[69927]: DEBUG nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1279.374559] env[69927]: DEBUG nova.network.neutron [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1279.432875] env[69927]: DEBUG nova.policy [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76414b2ae1aa4ab582c2b59fd4218005', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '544f2a021144492ba1aea46ce6075e53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1279.580945] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d2eb88-4eab-4e3a-ac87-1d5229f6e0f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.591050] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed2d32c-6c73-45db-bd57-53d66929da56 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.626026] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0645151-966d-48c0-8cd0-ade5da10b798 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.635191] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1c33bf-04be-485c-ace1-627b7e334624 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.658580] env[69927]: DEBUG nova.compute.provider_tree [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.800297] env[69927]: DEBUG nova.network.neutron [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Successfully created port: ee634287-1d03-4e08-bdc7-07ab41963c1c {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.880425] env[69927]: DEBUG nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1279.910430] env[69927]: DEBUG nova.compute.manager [req-071c0f60-16a6-4594-b871-74a686390c2b req-20415c93-59e1-4cb7-9c3e-58f3a171fbf6 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Received event network-vif-deleted-e257b275-32fb-40b6-be25-78208eb9442e {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1279.910607] env[69927]: INFO nova.compute.manager [req-071c0f60-16a6-4594-b871-74a686390c2b req-20415c93-59e1-4cb7-9c3e-58f3a171fbf6 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Neutron deleted interface e257b275-32fb-40b6-be25-78208eb9442e; detaching it from the instance and deleting it from the info cache [ 1279.910823] env[69927]: DEBUG nova.network.neutron [req-071c0f60-16a6-4594-b871-74a686390c2b req-20415c93-59e1-4cb7-9c3e-58f3a171fbf6 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.163126] env[69927]: DEBUG nova.scheduler.client.report [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1280.363970] env[69927]: DEBUG nova.network.neutron [-] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.401114] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 
tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.401485] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.401745] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.401983] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.402180] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.404596] env[69927]: INFO nova.compute.manager [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Terminating instance [ 1280.418467] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf5f1755-2d94-4b1d-b249-898f81196c03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.429895] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9835a8e2-41aa-4305-8436-88c0bc6837d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.461796] env[69927]: DEBUG nova.compute.manager [req-071c0f60-16a6-4594-b871-74a686390c2b req-20415c93-59e1-4cb7-9c3e-58f3a171fbf6 service nova] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Detach interface failed, port_id=e257b275-32fb-40b6-be25-78208eb9442e, reason: Instance afdd23d0-c8e0-4d49-a188-525b6b3f31c8 could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1280.668833] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.799s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.671205] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.464s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.671205] env[69927]: DEBUG nova.objects.instance [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lazy-loading 'resources' on Instance uuid 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.695953] env[69927]: INFO nova.scheduler.client.report [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Deleted allocations for instance 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11 [ 1280.868465] env[69927]: INFO nova.compute.manager [-] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Took 1.66 seconds to deallocate network for instance. [ 1280.890049] env[69927]: DEBUG nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1280.908987] env[69927]: DEBUG nova.compute.manager [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1280.909206] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1280.909479] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a01c1f3-d493-45f6-9316-363ba0cbf117 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.920250] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1280.920726] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1280.920726] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1280.920864] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1280.920958] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1280.921100] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1280.921799] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1280.921799] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1280.921799] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1280.922016] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1280.922081] env[69927]: DEBUG nova.virt.hardware [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1280.923351] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0057fab2-bcb5-4b0e-bf15-085189e64650 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.927892] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1280.927892] env[69927]: value = "task-4096942" [ 1280.927892] env[69927]: _type = "Task" [ 1280.927892] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.936021] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670e8de1-52c8-44ac-a284-aa9b9917ef5b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.944590] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096942, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.206433] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e0f50abd-47d6-4fe6-be3e-df5e56178d3e tempest-ServerRescueTestJSONUnderV235-236466356 tempest-ServerRescueTestJSONUnderV235-236466356-project-member] Lock "8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.904s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.210092] env[69927]: DEBUG nova.compute.manager [req-d0ca6b8c-58cd-4b43-b574-832f627da07d req-8c614d3d-7af5-4d54-9ffc-78864bd13b16 service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Received event network-vif-plugged-ee634287-1d03-4e08-bdc7-07ab41963c1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1281.210092] env[69927]: DEBUG oslo_concurrency.lockutils [req-d0ca6b8c-58cd-4b43-b574-832f627da07d req-8c614d3d-7af5-4d54-9ffc-78864bd13b16 service nova] Acquiring lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.210092] env[69927]: DEBUG oslo_concurrency.lockutils [req-d0ca6b8c-58cd-4b43-b574-832f627da07d req-8c614d3d-7af5-4d54-9ffc-78864bd13b16 service nova] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.210092] env[69927]: DEBUG oslo_concurrency.lockutils [req-d0ca6b8c-58cd-4b43-b574-832f627da07d req-8c614d3d-7af5-4d54-9ffc-78864bd13b16 service nova] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.210092] env[69927]: DEBUG nova.compute.manager [req-d0ca6b8c-58cd-4b43-b574-832f627da07d req-8c614d3d-7af5-4d54-9ffc-78864bd13b16 service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] No waiting events found dispatching network-vif-plugged-ee634287-1d03-4e08-bdc7-07ab41963c1c {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1281.210397] env[69927]: WARNING nova.compute.manager [req-d0ca6b8c-58cd-4b43-b574-832f627da07d req-8c614d3d-7af5-4d54-9ffc-78864bd13b16 service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Received unexpected event network-vif-plugged-ee634287-1d03-4e08-bdc7-07ab41963c1c for instance with vm_state building and task_state spawning. 
[ 1281.303656] env[69927]: DEBUG nova.network.neutron [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Successfully updated port: ee634287-1d03-4e08-bdc7-07ab41963c1c {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1281.341985] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f073f0-3d5c-44f4-a864-78d43c664c6a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.352961] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcfd7f8-0a58-45a9-b57f-b2da7a7528f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.385973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.387033] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642349f0-81eb-4558-9f34-57fa76525593 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.396225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab66b9e-7335-4af3-ae76-af4e61fd3a44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.413082] env[69927]: DEBUG nova.compute.provider_tree [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.438995] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096942, 'name': PowerOffVM_Task, 'duration_secs': 0.22502} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.439528] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1281.439528] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1281.439761] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811592', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'name': 'volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '8b70b479-4a54-4bcb-813d-16cc0c9a67c5', 'attached_at': '2025-05-13T19:47:07.000000', 'detached_at': '', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'serial': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1281.440744] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a416352-0abf-476b-a74b-1c8a7a1279bc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.463894] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b811d6-f2b2-43be-8281-0701c276453d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.471269] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a2e6bc-47a3-44ec-9d9b-b1ad3cb6356b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.494445] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fc352f-feb9-4584-95c2-123eeaa033c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.510372] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] The volume has not been displaced from its original location: [datastore2] volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f/volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1281.515564] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1281.515913] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bb88b0f-9c82-48d4-a9c6-b3350623d70d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.533750] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1281.533750] env[69927]: value = "task-4096943" [ 1281.533750] env[69927]: _type = "Task" [ 1281.533750] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.541774] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096943, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.806121] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "refresh_cache-eb84d52d-7153-412b-9ed9-4b7986cdfbbf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.806322] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "refresh_cache-eb84d52d-7153-412b-9ed9-4b7986cdfbbf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.806441] env[69927]: DEBUG nova.network.neutron [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1281.919026] env[69927]: DEBUG nova.scheduler.client.report [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1282.048030] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da 
tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096943, 'name': ReconfigVM_Task, 'duration_secs': 0.210574} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.048030] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1282.050191] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce53d886-569d-4922-8832-c24160b2ec6c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.068986] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1282.068986] env[69927]: value = "task-4096944" [ 1282.068986] env[69927]: _type = "Task" [ 1282.068986] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.078071] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096944, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.349680] env[69927]: DEBUG nova.network.neutron [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1282.423120] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.428743] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.040s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1282.428743] env[69927]: DEBUG nova.objects.instance [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lazy-loading 'resources' on Instance uuid afdd23d0-c8e0-4d49-a188-525b6b3f31c8 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1282.449540] env[69927]: INFO nova.scheduler.client.report [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Deleted allocations for instance 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d [ 1282.550470] env[69927]: DEBUG nova.network.neutron [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Updating instance_info_cache with network_info: [{"id": "ee634287-1d03-4e08-bdc7-07ab41963c1c", "address": "fa:16:3e:51:fa:87", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee634287-1d", "ovs_interfaceid": "ee634287-1d03-4e08-bdc7-07ab41963c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.580241] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096944, 'name': ReconfigVM_Task, 'duration_secs': 0.154533} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.580568] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811592', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'name': 'volume-f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '8b70b479-4a54-4bcb-813d-16cc0c9a67c5', 'attached_at': '2025-05-13T19:47:07.000000', 'detached_at': '', 'volume_id': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f', 'serial': 'f5e28970-f462-4070-87f2-2e1e9b4cab4f'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1282.580827] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1282.581577] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911a71c7-a979-4aac-9958-1ef6cf912df5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.588437] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1282.588669] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abff8266-ee3e-467e-b964-d8ab76649f4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.668362] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1282.668616] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1282.668897] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleting the datastore file [datastore1] 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1282.669222] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84d37513-e5a0-46df-b6df-f5a8aa6a8eb6 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.676730] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1282.676730] env[69927]: value = "task-4096946" [ 1282.676730] env[69927]: _type = "Task" [ 1282.676730] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.686453] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.958837] env[69927]: DEBUG oslo_concurrency.lockutils [None req-7bd81932-d7eb-4e78-8cd7-1ac18981c605 tempest-ListServerFiltersTestJSON-532103941 tempest-ListServerFiltersTestJSON-532103941-project-member] Lock "80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.155s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1283.053377] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "refresh_cache-eb84d52d-7153-412b-9ed9-4b7986cdfbbf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.053711] env[69927]: DEBUG nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Instance network_info: |[{"id": "ee634287-1d03-4e08-bdc7-07ab41963c1c", "address": "fa:16:3e:51:fa:87", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee634287-1d", "ovs_interfaceid": "ee634287-1d03-4e08-bdc7-07ab41963c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1283.054146] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:51:fa:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee634287-1d03-4e08-bdc7-07ab41963c1c', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1283.061552] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1283.062733] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1283.063470] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204fcdc6-f0a0-4d00-b4b9-eed5ece70be6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.066522] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c354f961-8450-4f6b-9b23-3e60b1c7be8a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.089369] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dd7ff3-33bf-4ddc-9f88-52a841f33ab8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.092585] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1283.092585] env[69927]: value = "task-4096947" [ 1283.092585] env[69927]: _type = "Task" [ 1283.092585] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.122346] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526413df-b454-4fbb-b9d2-c451236b9f26 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.128107] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096947, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.134123] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacdf697-e12c-4485-9cc2-4aebc02e38c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.148562] env[69927]: DEBUG nova.compute.provider_tree [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.190137] env[69927]: DEBUG oslo_vmware.api [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162835} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.190137] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1283.190137] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1283.190137] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1283.190137] env[69927]: INFO nova.compute.manager [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1283.190137] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1283.190137] env[69927]: DEBUG nova.compute.manager [-] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1283.190137] env[69927]: DEBUG nova.network.neutron [-] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1283.245404] env[69927]: DEBUG nova.compute.manager [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Received event network-changed-ee634287-1d03-4e08-bdc7-07ab41963c1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1283.245631] env[69927]: DEBUG nova.compute.manager [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Refreshing instance network info cache due to event network-changed-ee634287-1d03-4e08-bdc7-07ab41963c1c. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1283.246023] env[69927]: DEBUG oslo_concurrency.lockutils [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] Acquiring lock "refresh_cache-eb84d52d-7153-412b-9ed9-4b7986cdfbbf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.246212] env[69927]: DEBUG oslo_concurrency.lockutils [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] Acquired lock "refresh_cache-eb84d52d-7153-412b-9ed9-4b7986cdfbbf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.246422] env[69927]: DEBUG nova.network.neutron [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Refreshing network info cache for port ee634287-1d03-4e08-bdc7-07ab41963c1c {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1283.603166] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096947, 'name': CreateVM_Task, 'duration_secs': 0.380401} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.603480] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1283.635167] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.635360] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.635678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1283.635957] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cefae17c-6d0d-4646-81ca-0cbbcec73c3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.642476] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1283.642476] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52099700-5c74-8d6c-e18e-84e93860d606" [ 1283.642476] env[69927]: _type = "Task" [ 1283.642476] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.651623] env[69927]: DEBUG nova.scheduler.client.report [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1283.655334] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52099700-5c74-8d6c-e18e-84e93860d606, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.968668] env[69927]: DEBUG nova.network.neutron [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Updated VIF entry in instance network info cache for port ee634287-1d03-4e08-bdc7-07ab41963c1c. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.969235] env[69927]: DEBUG nova.network.neutron [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Updating instance_info_cache with network_info: [{"id": "ee634287-1d03-4e08-bdc7-07ab41963c1c", "address": "fa:16:3e:51:fa:87", "network": {"id": "5e9a6068-d0f1-44b2-b2a2-b87ebc0d53f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2067990706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "544f2a021144492ba1aea46ce6075e53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee634287-1d", "ovs_interfaceid": "ee634287-1d03-4e08-bdc7-07ab41963c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.153327] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52099700-5c74-8d6c-e18e-84e93860d606, 'name': SearchDatastore_Task, 'duration_secs': 0.009514} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.153637] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.153928] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1284.154240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.154422] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.154635] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1284.155020] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca26f4f7-d589-44d1-b4e9-fea8e2059714 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.157522] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.167912] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1284.168177] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1284.169555] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f46c707b-9709-448a-8f67-fa654df1e697 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.175692] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1284.175692] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521c7302-939f-6461-9edb-5bb71af246ab" [ 1284.175692] env[69927]: _type = "Task" [ 1284.175692] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.184613] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521c7302-939f-6461-9edb-5bb71af246ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.185892] env[69927]: INFO nova.scheduler.client.report [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Deleted allocations for instance afdd23d0-c8e0-4d49-a188-525b6b3f31c8 [ 1284.301049] env[69927]: DEBUG nova.network.neutron [-] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.472837] env[69927]: DEBUG oslo_concurrency.lockutils [req-43befec8-7956-404a-a942-457544806c8e req-0df755a0-5f4d-4efd-b7ea-07fcca2a112c service nova] Releasing lock "refresh_cache-eb84d52d-7153-412b-9ed9-4b7986cdfbbf" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.687141] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521c7302-939f-6461-9edb-5bb71af246ab, 'name': SearchDatastore_Task, 'duration_secs': 0.010644} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.687976] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce06f5b4-1ff3-484c-8ae7-4e1af3b7a441 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.693596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b928d11d-e8df-436e-af78-38a87c8cb1d8 tempest-AttachVolumeNegativeTest-700673554 tempest-AttachVolumeNegativeTest-700673554-project-member] Lock "afdd23d0-c8e0-4d49-a188-525b6b3f31c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.117s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.698025] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1284.698025] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5253af3a-27d3-446c-8591-7c18cc7d0af7" [ 1284.698025] env[69927]: _type = "Task" [ 1284.698025] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.708615] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5253af3a-27d3-446c-8591-7c18cc7d0af7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.804297] env[69927]: INFO nova.compute.manager [-] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Took 1.61 seconds to deallocate network for instance. [ 1285.212666] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5253af3a-27d3-446c-8591-7c18cc7d0af7, 'name': SearchDatastore_Task, 'duration_secs': 0.013155} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.212666] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.212960] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] eb84d52d-7153-412b-9ed9-4b7986cdfbbf/eb84d52d-7153-412b-9ed9-4b7986cdfbbf.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1285.213268] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-617e3bf2-52ff-4aad-8abb-213020b2b292 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.221268] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1285.221268] env[69927]: value = "task-4096948" [ 1285.221268] env[69927]: _type = "Task" [ 1285.221268] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.230842] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096948, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.283752] env[69927]: DEBUG nova.compute.manager [req-6aaafbc1-e420-468c-832b-72cfbb694ccb req-698b7e70-62d6-41b9-b3b3-6eb376b803e6 service nova] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Received event network-vif-deleted-3e85edce-fa8a-45d4-b109-5bdd98a06303 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1285.354557] env[69927]: INFO nova.compute.manager [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Took 0.55 seconds to detach 1 volumes for instance. [ 1285.732443] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096948, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459509} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.733084] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] eb84d52d-7153-412b-9ed9-4b7986cdfbbf/eb84d52d-7153-412b-9ed9-4b7986cdfbbf.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1285.733316] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1285.733570] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa0efcff-7726-43d1-b302-3c597a067ef6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.740226] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1285.740226] env[69927]: value = "task-4096950" [ 1285.740226] env[69927]: _type = "Task" [ 1285.740226] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.748252] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096950, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.864411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.865437] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.865905] env[69927]: DEBUG nova.objects.instance [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'resources' on Instance uuid 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.256825] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096950, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071113} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.256825] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1286.257358] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab86e1a4-2b99-4f47-afd9-73f98dae2acd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.280205] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] eb84d52d-7153-412b-9ed9-4b7986cdfbbf/eb84d52d-7153-412b-9ed9-4b7986cdfbbf.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1286.280336] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fd90963-649f-4429-8c44-78e6f933d24c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.300865] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1286.300865] env[69927]: value = "task-4096951" [ 1286.300865] env[69927]: _type = "Task" [ 1286.300865] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.309594] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096951, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.516936] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82243d7-8205-4ab9-b91f-71013230611b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.525839] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3663533e-fc53-4c54-9e81-a49a2cf10fb4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.569136] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0126d88c-8482-4290-9be6-2cb3eda188cf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.578144] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4b4e7d-93fe-4d0d-a6d1-1d90eb649be7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.592402] env[69927]: DEBUG nova.compute.provider_tree [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1286.811954] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096951, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.096171] env[69927]: DEBUG nova.scheduler.client.report [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1287.311734] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096951, 'name': ReconfigVM_Task, 'duration_secs': 0.825609} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.311734] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Reconfigured VM instance instance-00000075 to attach disk [datastore1] eb84d52d-7153-412b-9ed9-4b7986cdfbbf/eb84d52d-7153-412b-9ed9-4b7986cdfbbf.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1287.312508] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b461bd4-8f26-4cc0-8498-90c485b6ebd6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.320515] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1287.320515] env[69927]: value = "task-4096952" [ 1287.320515] env[69927]: _type = "Task" [ 1287.320515] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.329734] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096952, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.603046] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.737s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.620173] env[69927]: INFO nova.scheduler.client.report [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted allocations for instance 8b70b479-4a54-4bcb-813d-16cc0c9a67c5 [ 1287.831392] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096952, 'name': Rename_Task, 'duration_secs': 0.174318} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.831816] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1287.832071] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e15b4f30-24a4-47bd-bd95-bb69f0fb2a31 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.839354] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1287.839354] env[69927]: value = "task-4096954" [ 1287.839354] env[69927]: _type = "Task" [ 1287.839354] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.847958] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096954, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.064720] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.064999] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.129295] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dcba227c-2475-40d4-a108-cddbebbf64da tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "8b70b479-4a54-4bcb-813d-16cc0c9a67c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.728s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.352161] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096954, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.567479] env[69927]: DEBUG nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1288.853591] env[69927]: DEBUG oslo_vmware.api [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096954, 'name': PowerOnVM_Task, 'duration_secs': 0.537035} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.855166] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1288.855383] env[69927]: INFO nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Took 7.97 seconds to spawn the instance on the hypervisor. [ 1288.855561] env[69927]: DEBUG nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1288.858832] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0aef01-1376-4eb3-8d1f-f7154798a45c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.861850] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.862369] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.090821] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.091122] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.092678] env[69927]: INFO nova.compute.claims [None 
req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1289.365312] env[69927]: DEBUG nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1289.379652] env[69927]: INFO nova.compute.manager [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Took 15.03 seconds to build instance. [ 1289.881279] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d06c8ac9-2a4d-4ace-b6bb-7374368ef9a3 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.565s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1289.888704] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.224796] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4e0536-5187-4573-ace1-f359bfb9768c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.231008] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.231283] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.231462] env[69927]: DEBUG nova.compute.manager [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1290.234019] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b560e7-cb2d-470c-8981-01e9e7ce8802 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.237503] env[69927]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd41a78-43b3-440d-a4a5-e2a17b352a29 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.244444] env[69927]: DEBUG nova.compute.manager [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1290.245019] env[69927]: DEBUG nova.objects.instance [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'flavor' on Instance uuid eb84d52d-7153-412b-9ed9-4b7986cdfbbf {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1290.274736] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d122049-40c5-414a-955c-a89df054ddf5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.283369] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac8efa0-eaf0-4adf-8d9a-6879574c4114 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.297583] env[69927]: DEBUG nova.compute.provider_tree [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.800366] env[69927]: DEBUG nova.scheduler.client.report [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1291.253024] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1291.253385] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f2d3489-c6fc-4087-9c00-5c32d161b15f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.262133] env[69927]: DEBUG oslo_vmware.api [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1291.262133] 
env[69927]: value = "task-4096956" [ 1291.262133] env[69927]: _type = "Task" [ 1291.262133] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.270982] env[69927]: DEBUG oslo_vmware.api [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096956, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.306121] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.215s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.306715] env[69927]: DEBUG nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1291.309747] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.421s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.311358] env[69927]: INFO nova.compute.claims [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1291.772513] env[69927]: DEBUG oslo_vmware.api [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096956, 'name': PowerOffVM_Task, 'duration_secs': 0.203096} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.772768] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1291.772970] env[69927]: DEBUG nova.compute.manager [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1291.773739] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eec1028-71f6-4513-9e10-630c2ad2ca77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.816221] env[69927]: DEBUG nova.compute.utils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1291.820358] env[69927]: DEBUG nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1291.820536] env[69927]: DEBUG nova.network.neutron [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1291.868407] env[69927]: DEBUG nova.policy [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef08164611894c289d4c30194d91526a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823381b9f644adf818b490c551f5a3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1292.285318] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4ab31b8e-e3e3-4960-bfb6-be737009b2c4 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.054s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.320173] env[69927]: DEBUG nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Start building block device 
mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1292.462262] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9d4609-ee7e-4ef2-b363-12b83ad82005 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.471141] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6b710e-ab7f-465a-aaa0-929fe1f289fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.502334] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1c740b-03a0-4b4f-bf19-0f22a548a1b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.510517] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd80822-ef82-44ee-93fa-33a53ac840dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.524808] env[69927]: DEBUG nova.compute.provider_tree [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.537041] env[69927]: DEBUG nova.network.neutron [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Successfully created port: cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1293.027669] env[69927]: DEBUG nova.scheduler.client.report [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1293.333475] env[69927]: DEBUG nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1293.365617] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1293.366099] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1293.366450] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1293.366828] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1293.367201] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1293.367482] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1293.367960] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1293.368325] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1293.368670] env[69927]: DEBUG 
nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1293.369088] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1293.369444] env[69927]: DEBUG nova.virt.hardware [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1293.371072] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0eec295-b10f-4aa8-a34b-36dfba0fc319 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.383126] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c0ad4a-4dbd-4204-8aac-c599d1413fe8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.532552] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.533102] env[69927]: DEBUG nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1293.602029] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.602243] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.602430] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.602637] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.602811] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.605157] env[69927]: INFO nova.compute.manager [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Terminating instance [ 1294.038811] env[69927]: DEBUG nova.compute.utils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1294.040727] env[69927]: DEBUG nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1294.041267] env[69927]: DEBUG nova.network.neutron [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1294.089290] env[69927]: DEBUG nova.compute.manager [req-b1083d1d-666f-47a0-99ca-908ec7cceefd req-99a50259-ad3b-46de-aab8-92de76d54ce7 service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Received event network-vif-plugged-cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1294.089516] env[69927]: DEBUG oslo_concurrency.lockutils [req-b1083d1d-666f-47a0-99ca-908ec7cceefd req-99a50259-ad3b-46de-aab8-92de76d54ce7 service nova] Acquiring lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1294.089711] env[69927]: DEBUG oslo_concurrency.lockutils [req-b1083d1d-666f-47a0-99ca-908ec7cceefd req-99a50259-ad3b-46de-aab8-92de76d54ce7 service nova] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.089914] env[69927]: DEBUG oslo_concurrency.lockutils [req-b1083d1d-666f-47a0-99ca-908ec7cceefd req-99a50259-ad3b-46de-aab8-92de76d54ce7 service nova] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.090236] env[69927]: DEBUG nova.compute.manager [req-b1083d1d-666f-47a0-99ca-908ec7cceefd req-99a50259-ad3b-46de-aab8-92de76d54ce7 service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] No waiting events found dispatching network-vif-plugged-cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1294.090504] env[69927]: WARNING nova.compute.manager [req-b1083d1d-666f-47a0-99ca-908ec7cceefd req-99a50259-ad3b-46de-aab8-92de76d54ce7 service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Received unexpected event network-vif-plugged-cefb857a-0190-4924-a605-749a4858cef2 for instance with vm_state building and task_state spawning. 
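The entries just above ("Received event network-vif-plugged-cefb857a-…", "No waiting events found dispatching …", and the WARNING about an unexpected event while the instance is in vm_state building / task_state spawning) record Neutron's plug notification arriving before the spawn path has registered a waiter for it. The snippet below is a minimal, illustrative model of that per-instance event-waiter pattern, written against the standard library only; the class and method names are assumptions for illustration and are not Nova's actual InstanceEvents implementation.

```python
# Conceptual sketch only: a simplified per-instance event-waiter registry in the
# spirit of the "pop_instance_event" / "No waiting events found" entries above.
# Names here are illustrative, not Nova's real API.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEventWaiters:
    """Tracks events (e.g. 'network-vif-plugged-<port-id>') a build is waiting on."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest before triggering the action that emits the event."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        """Called by the external-event handler when the network service reports the event."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # Mirrors the WARNING in the log: the event raced ahead of (or never had)
            # a registered waiter, e.g. while the instance is still building.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return False
        waiter.set()
        return True


if __name__ == "__main__":
    # The plug notification arrives before anyone is waiting, as in the run above.
    logging.basicConfig(level=logging.INFO)
    events = InstanceEventWaiters()
    events.dispatch("ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6",
                    "network-vif-plugged-cefb857a-0190-4924-a605-749a4858cef2")
```

In the run recorded here that race is benign: the unexpected event is logged and dropped, and the build of ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 continues, later picking up the port details when it refreshes the instance network info cache.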
[ 1294.096149] env[69927]: DEBUG nova.policy [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16c1e562693c466c8786016a777f9f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cf6bb3492c642aa9a168e484299289c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1294.110524] env[69927]: DEBUG nova.compute.manager [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1294.110524] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1294.111538] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f59f5ec-6fb3-4264-aab4-a993f7f5e599 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.119865] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1294.120270] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0bb2788-f292-4506-ba15-11a7fba7efab {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.191110] env[69927]: DEBUG nova.network.neutron [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Successfully updated port: cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1294.200889] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1294.201213] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1294.201425] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e99f228-fd69-4407-9846-391aaa8e988a 
tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleting the datastore file [datastore1] eb84d52d-7153-412b-9ed9-4b7986cdfbbf {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1294.201944] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d49fab7-529b-4f1b-9371-34a18736537f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.209573] env[69927]: DEBUG oslo_vmware.api [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1294.209573] env[69927]: value = "task-4096958" [ 1294.209573] env[69927]: _type = "Task" [ 1294.209573] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.223391] env[69927]: DEBUG oslo_vmware.api [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.546666] env[69927]: DEBUG nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1294.614721] env[69927]: DEBUG nova.network.neutron [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Successfully created port: cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1294.697878] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.698111] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1294.698395] env[69927]: DEBUG nova.network.neutron [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1294.721641] env[69927]: DEBUG oslo_vmware.api [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145736} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.723505] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1294.723505] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1294.723505] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1294.723505] env[69927]: INFO nova.compute.manager [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1294.723505] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1294.723505] env[69927]: DEBUG nova.compute.manager [-] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1294.723505] env[69927]: DEBUG nova.network.neutron [-] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1295.231604] env[69927]: DEBUG nova.network.neutron [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1295.267517] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.267794] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.268008] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.268197] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.268361] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.270515] env[69927]: INFO nova.compute.manager [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Terminating instance [ 1295.456648] env[69927]: DEBUG nova.network.neutron [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updating instance_info_cache with network_info: [{"id": "cefb857a-0190-4924-a605-749a4858cef2", "address": "fa:16:3e:8f:db:3a", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcefb857a-01", "ovs_interfaceid": "cefb857a-0190-4924-a605-749a4858cef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.525912] env[69927]: DEBUG nova.network.neutron [-] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.556764] env[69927]: DEBUG nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Start spawning the instance on the hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1295.585238] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1295.585931] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1295.585931] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1295.585931] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1295.585931] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 
tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1295.586223] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1295.586294] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1295.586456] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1295.586624] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1295.586784] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1295.586957] env[69927]: DEBUG nova.virt.hardware [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1295.587890] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76ff417-f23d-4f75-b91c-5ebba900ecae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.597214] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3aba81-d08a-4dae-8fa5-b968ddc068a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.774441] env[69927]: DEBUG nova.compute.manager [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1295.774681] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1295.775589] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77bf392-022e-4886-941c-9671d0c33925 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.783608] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1295.783872] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25e046c5-65e7-4a38-a56a-13efa10bfe1f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.791241] env[69927]: DEBUG oslo_vmware.api [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1295.791241] env[69927]: value = "task-4096959" [ 1295.791241] env[69927]: _type = "Task" [ 1295.791241] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.802701] env[69927]: DEBUG oslo_vmware.api [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096959, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.960786] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.961230] env[69927]: DEBUG nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Instance network_info: |[{"id": "cefb857a-0190-4924-a605-749a4858cef2", "address": "fa:16:3e:8f:db:3a", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcefb857a-01", "ovs_interfaceid": "cefb857a-0190-4924-a605-749a4858cef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1295.961675] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:db:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cefb857a-0190-4924-a605-749a4858cef2', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1295.970136] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Creating folder: Project (0823381b9f644adf818b490c551f5a3f). Parent ref: group-v811283. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1295.970470] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e161601-5fff-4866-bbf6-3a27f0d53a0c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.986149] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created folder: Project (0823381b9f644adf818b490c551f5a3f) in parent group-v811283. [ 1295.986583] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Creating folder: Instances. Parent ref: group-v811605. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1295.986921] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10145958-3d3c-4685-8b4f-bf3eaa2b71a6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.999160] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created folder: Instances in parent group-v811605. [ 1295.999441] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1295.999649] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1295.999909] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae47f21f-588e-4c2a-bee4-1945928b6536 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.022737] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1296.022737] env[69927]: value = "task-4096962" [ 1296.022737] env[69927]: _type = "Task" [ 1296.022737] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.029071] env[69927]: INFO nova.compute.manager [-] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Took 1.31 seconds to deallocate network for instance. [ 1296.036723] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096962, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.125553] env[69927]: DEBUG nova.compute.manager [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Received event network-changed-cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.125775] env[69927]: DEBUG nova.compute.manager [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Refreshing instance network info cache due to event network-changed-cefb857a-0190-4924-a605-749a4858cef2. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1296.125998] env[69927]: DEBUG oslo_concurrency.lockutils [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] Acquiring lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.126164] env[69927]: DEBUG oslo_concurrency.lockutils [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] Acquired lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.126337] env[69927]: DEBUG nova.network.neutron [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Refreshing network info cache for port cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1296.128275] env[69927]: DEBUG nova.compute.manager [req-16e14512-1f22-4ec8-8f8a-c83dfc496a0c req-e65fd25d-784f-465b-8b70-bb7b1b0a6e39 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-vif-plugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.128482] env[69927]: DEBUG oslo_concurrency.lockutils [req-16e14512-1f22-4ec8-8f8a-c83dfc496a0c req-e65fd25d-784f-465b-8b70-bb7b1b0a6e39 service nova] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.128683] env[69927]: DEBUG oslo_concurrency.lockutils [req-16e14512-1f22-4ec8-8f8a-c83dfc496a0c req-e65fd25d-784f-465b-8b70-bb7b1b0a6e39 service nova] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.128945] env[69927]: DEBUG oslo_concurrency.lockutils [req-16e14512-1f22-4ec8-8f8a-c83dfc496a0c req-e65fd25d-784f-465b-8b70-bb7b1b0a6e39 service nova] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.129268] env[69927]: DEBUG nova.compute.manager [req-16e14512-1f22-4ec8-8f8a-c83dfc496a0c 
req-e65fd25d-784f-465b-8b70-bb7b1b0a6e39 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] No waiting events found dispatching network-vif-plugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1296.129867] env[69927]: WARNING nova.compute.manager [req-16e14512-1f22-4ec8-8f8a-c83dfc496a0c req-e65fd25d-784f-465b-8b70-bb7b1b0a6e39 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received unexpected event network-vif-plugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 for instance with vm_state building and task_state spawning. [ 1296.224746] env[69927]: DEBUG nova.network.neutron [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Successfully updated port: cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1296.303116] env[69927]: DEBUG oslo_vmware.api [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096959, 'name': PowerOffVM_Task, 'duration_secs': 0.230564} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.303477] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1296.303678] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1296.304126] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0da82b95-76ce-44b1-9215-d6006d4e4463 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.402269] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1296.402269] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1296.402269] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleting the datastore file [datastore2] 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1296.402269] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dde26fd8-733b-4464-8cae-078c59370050 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.412969] env[69927]: DEBUG oslo_vmware.api [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for the task: (returnval){ [ 1296.412969] env[69927]: value = "task-4096964" [ 1296.412969] env[69927]: _type = "Task" [ 1296.412969] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.422867] env[69927]: DEBUG oslo_vmware.api [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096964, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.533020] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096962, 'name': CreateVM_Task, 'duration_secs': 0.362541} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.533115] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1296.533796] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.533964] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.534452] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1296.534708] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0f14e96-6e6a-45af-80bb-cb577e8e6328 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.538417] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.538669] env[69927]: DEBUG oslo_concurrency.lockutils 
[None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.538889] env[69927]: DEBUG nova.objects.instance [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'resources' on Instance uuid eb84d52d-7153-412b-9ed9-4b7986cdfbbf {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1296.542306] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1296.542306] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5266c616-372e-8aed-7066-dffca30fd935" [ 1296.542306] env[69927]: _type = "Task" [ 1296.542306] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.553699] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5266c616-372e-8aed-7066-dffca30fd935, 'name': SearchDatastore_Task, 'duration_secs': 0.010075} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.553877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.554034] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1296.554280] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.554427] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.554608] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 
tempest-ServerActionsTestOtherA-296658517-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.554887] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82953b83-bf97-4001-af4b-a06ba5e61bd5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.563666] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.563989] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1296.565040] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be14a7d-2f54-42fe-9f53-8bf32725731f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.571013] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1296.571013] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520e5a0d-a3ea-8dc8-09ae-12ebdfa6a0a5" [ 1296.571013] env[69927]: _type = "Task" [ 1296.571013] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.579496] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520e5a0d-a3ea-8dc8-09ae-12ebdfa6a0a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.729272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.729272] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.729272] env[69927]: DEBUG nova.network.neutron [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1296.825979] env[69927]: DEBUG nova.network.neutron [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updated VIF entry in instance network info cache for port cefb857a-0190-4924-a605-749a4858cef2. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1296.826440] env[69927]: DEBUG nova.network.neutron [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updating instance_info_cache with network_info: [{"id": "cefb857a-0190-4924-a605-749a4858cef2", "address": "fa:16:3e:8f:db:3a", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcefb857a-01", "ovs_interfaceid": "cefb857a-0190-4924-a605-749a4858cef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.923453] env[69927]: DEBUG oslo_vmware.api [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Task: {'id': task-4096964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138502} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.924330] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1296.924330] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1296.924330] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1296.924330] env[69927]: INFO nova.compute.manager [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1296.924678] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1296.924678] env[69927]: DEBUG nova.compute.manager [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1296.924816] env[69927]: DEBUG nova.network.neutron [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1296.936021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.936260] env[69927]: DEBUG oslo_concurrency.lockutils [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.085916] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520e5a0d-a3ea-8dc8-09ae-12ebdfa6a0a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009284} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.086844] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a177621-d8c4-4657-815b-7d07001187dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.097596] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1297.097596] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52afe076-6cac-598c-aeae-4927f8b89876" [ 1297.097596] env[69927]: _type = "Task" [ 1297.097596] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.109443] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52afe076-6cac-598c-aeae-4927f8b89876, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.192073] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49af9dc-a5c3-4b6b-be96-4ae09f3ce80c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.202952] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f6ab8c-2138-410c-9df0-8c1c8ed26cec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.241201] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c923f446-de37-4d6b-b7af-465ddc6f854f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.250060] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdcfd0a-0929-44a0-8db1-1f45df4fd6d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.264936] env[69927]: DEBUG nova.compute.provider_tree [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.272146] env[69927]: DEBUG nova.network.neutron [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1297.329546] env[69927]: DEBUG oslo_concurrency.lockutils [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] Releasing lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.329841] env[69927]: DEBUG nova.compute.manager [req-fbc1663a-9f6b-49f6-8555-3d6dc8fb8593 req-e5f517aa-4908-4fa8-bc84-56605aed614f service nova] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Received event network-vif-deleted-ee634287-1d03-4e08-bdc7-07ab41963c1c {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1297.392080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "9e9e93cc-e225-4ec7-850f-916aa078ba30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.392347] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.439816] env[69927]: INFO nova.compute.manager [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Detaching volume 2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257 [ 1297.450217] env[69927]: DEBUG nova.network.neutron [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.474931] env[69927]: INFO nova.virt.block_device [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Attempting to driver detach volume 2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257 from mountpoint /dev/sdb [ 1297.475184] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1297.475374] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811595', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'name': 'volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b007a697-7da4-4c97-9ccb-046d86b27568', 'attached_at': '', 'detached_at': '', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'serial': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1297.476494] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6726733f-e42e-4738-97ff-77f6b88f55b3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.499448] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fcfa3e-032f-44fb-b9ce-970feb4e3eb4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.507578] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1192e47-e998-4f60-923c-9d46998b6c2d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.530687] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf25249-49aa-405e-9373-fadc3c07e3ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.546803] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] The volume has not been displaced from its original location: [datastore1] volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257/volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1297.552218] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1297.552612] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c374be8-c696-4338-83c7-5ac95edb7c4d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.573370] env[69927]: DEBUG oslo_vmware.api [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1297.573370] env[69927]: value = "task-4096965" [ 1297.573370] env[69927]: _type = "Task" [ 1297.573370] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.582825] env[69927]: DEBUG oslo_vmware.api [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096965, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.608910] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52afe076-6cac-598c-aeae-4927f8b89876, 'name': SearchDatastore_Task, 'duration_secs': 0.012229} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.609240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.609439] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6/ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1297.610067] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f54eddf9-c36e-4de7-8e48-f9c854d5d859 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.617359] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1297.617359] env[69927]: value = "task-4096966" [ 1297.617359] env[69927]: _type = "Task" [ 1297.617359] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.628018] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096966, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.768067] env[69927]: DEBUG nova.scheduler.client.report [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1297.871066] env[69927]: DEBUG nova.network.neutron [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.894370] env[69927]: DEBUG nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1297.954058] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.954428] env[69927]: DEBUG nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Instance network_info: |[{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1297.954915] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:81:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cbd5e0e-9116-46f1-9748-13a73d2d7e75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdd401dc-1a64-40f2-931e-f19611bb56d3', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1297.969883] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1297.970836] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1297.971229] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4a800ca-0cc0-4192-8867-e67e2754e7db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.994270] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1297.994270] env[69927]: value = "task-4096967" [ 1297.994270] env[69927]: _type = "Task" [ 1297.994270] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.004721] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096967, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.086834] env[69927]: DEBUG oslo_vmware.api [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096965, 'name': ReconfigVM_Task, 'duration_secs': 0.255457} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.086991] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1298.092264] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94835427-0233-442f-ab48-c64b6c69bd5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.109696] env[69927]: DEBUG oslo_vmware.api [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1298.109696] env[69927]: value = "task-4096968" [ 1298.109696] env[69927]: _type = "Task" [ 1298.109696] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.120996] env[69927]: DEBUG oslo_vmware.api [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096968, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.130560] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096966, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50912} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.130848] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6/ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1298.131116] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1298.131414] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6869f963-5e4b-4e09-84a1-c2905ad6af10 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.137579] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1298.137579] env[69927]: value = "task-4096969" [ 1298.137579] env[69927]: _type = "Task" [ 1298.137579] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.150758] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096969, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.252107] env[69927]: DEBUG nova.compute.manager [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.252107] env[69927]: DEBUG nova.compute.manager [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing instance network info cache due to event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1298.252107] env[69927]: DEBUG oslo_concurrency.lockutils [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] Acquiring lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.252107] env[69927]: DEBUG oslo_concurrency.lockutils [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] Acquired lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.252107] env[69927]: DEBUG nova.network.neutron [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1298.274537] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.301167] env[69927]: INFO nova.scheduler.client.report [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted allocations for instance eb84d52d-7153-412b-9ed9-4b7986cdfbbf [ 1298.374130] env[69927]: INFO nova.compute.manager [-] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Took 1.45 seconds to deallocate network for instance. [ 1298.413453] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.413732] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.415430] env[69927]: INFO nova.compute.claims [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.505202] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096967, 'name': CreateVM_Task, 'duration_secs': 0.407705} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.505202] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1298.505202] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.505477] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.505638] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1298.505935] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70bf3979-d72b-4500-a772-2d5bf3960c13 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.511024] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1298.511024] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5204c3f5-432e-03ed-f1c1-25ab1e4eeefa" [ 1298.511024] env[69927]: _type = "Task" [ 1298.511024] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.519622] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5204c3f5-432e-03ed-f1c1-25ab1e4eeefa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.620655] env[69927]: DEBUG oslo_vmware.api [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096968, 'name': ReconfigVM_Task, 'duration_secs': 0.18334} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.621041] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811595', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'name': 'volume-2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b007a697-7da4-4c97-9ccb-046d86b27568', 'attached_at': '', 'detached_at': '', 'volume_id': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257', 'serial': '2e7c1ed4-9d5c-4f45-aaf5-2b53acd1d257'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1298.648536] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096969, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079435} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.648800] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1298.649714] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef0224d-561a-4b77-afe9-2675f68d89fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.680227] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6/ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1298.680694] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7215064-6911-4921-8b34-9b6c97c6f3a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.706797] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1298.706797] env[69927]: value = "task-4096970" [ 1298.706797] env[69927]: _type = "Task" [ 1298.706797] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.715092] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.811660] env[69927]: DEBUG oslo_concurrency.lockutils [None req-1e99f228-fd69-4407-9846-391aaa8e988a tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "eb84d52d-7153-412b-9ed9-4b7986cdfbbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.209s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.882017] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.980128] env[69927]: DEBUG nova.network.neutron [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updated VIF entry in instance network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1298.980572] env[69927]: DEBUG nova.network.neutron [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.022809] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5204c3f5-432e-03ed-f1c1-25ab1e4eeefa, 'name': 
SearchDatastore_Task, 'duration_secs': 0.009487} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.023134] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.023545] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1299.023775] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.023775] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.023915] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1299.024638] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-362b6b11-b417-4628-93f7-d7c78746872d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.033866] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1299.034077] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1299.034805] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9649b6a-c5bd-4a27-b4f3-a88d195ade10 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.041697] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1299.041697] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52928ff0-7e52-3dfc-d371-5ad41ba4cbc2" [ 1299.041697] env[69927]: _type = "Task" [ 1299.041697] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.050777] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52928ff0-7e52-3dfc-d371-5ad41ba4cbc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.170930] env[69927]: DEBUG nova.objects.instance [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lazy-loading 'flavor' on Instance uuid b007a697-7da4-4c97-9ccb-046d86b27568 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1299.218574] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096970, 'name': ReconfigVM_Task, 'duration_secs': 0.322628} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.218934] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Reconfigured VM instance instance-00000076 to attach disk [datastore1] ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6/ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1299.219802] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea1e8fb3-d2b8-4eb7-bf06-b2988eb6d5c0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.227858] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1299.227858] env[69927]: value = "task-4096971" [ 1299.227858] env[69927]: _type = "Task" [ 1299.227858] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.235754] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096971, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.485618] env[69927]: DEBUG oslo_concurrency.lockutils [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] Releasing lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.485866] env[69927]: DEBUG nova.compute.manager [req-7dbd80af-c594-4c68-8c93-c2df63b20375 req-ae028c68-6f49-4371-9236-ad92df13e6af service nova] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Received event network-vif-deleted-d201dadc-ab89-4ede-8c29-41217e3af341 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1299.510012] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.510115] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.510758] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.510758] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.510758] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.512766] env[69927]: INFO nova.compute.manager [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 
tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Terminating instance [ 1299.547954] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f3ff48-2718-4269-bd8a-7fe64aad73c9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.554280] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52928ff0-7e52-3dfc-d371-5ad41ba4cbc2, 'name': SearchDatastore_Task, 'duration_secs': 0.009596} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.555346] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fc55468-b9d7-4921-95a5-ba3e93da7b22 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.560341] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090db5a6-7abe-4355-b2f8-8d23f014989e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.564504] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1299.564504] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b14c94-403c-fde9-b361-aadbc864421f" [ 1299.564504] env[69927]: _type = "Task" [ 1299.564504] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.594224] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b7d9f3-5618-4fd8-8247-c728e7a58872 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.600242] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52b14c94-403c-fde9-b361-aadbc864421f, 'name': SearchDatastore_Task, 'duration_secs': 0.010026} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.600877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.601163] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1299.601420] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6b4f29a-e70a-4e5c-9502-30639e39f613 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.606682] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec66ffda-2765-4748-845b-029a93b22265 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.611484] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1299.611484] env[69927]: value = "task-4096972" [ 1299.611484] env[69927]: _type = "Task" [ 1299.611484] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.626041] env[69927]: DEBUG nova.compute.provider_tree [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.630563] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096972, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.737657] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096971, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.018849] env[69927]: DEBUG nova.compute.manager [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1300.018849] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1300.019569] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a09d748-5bdd-44a8-af18-af0034d08605 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.027622] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1300.027868] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e695be3e-00ac-4a34-8e4d-2ec0de79368f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.034945] env[69927]: DEBUG oslo_vmware.api [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1300.034945] env[69927]: value = "task-4096973" [ 1300.034945] env[69927]: _type = "Task" [ 1300.034945] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.043836] env[69927]: DEBUG oslo_vmware.api [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.121723] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096972, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.127876] env[69927]: DEBUG nova.scheduler.client.report [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1300.179097] env[69927]: DEBUG oslo_concurrency.lockutils [None req-28303318-02de-4796-80e4-c437f6467e18 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.242s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.241986] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096971, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.548568] env[69927]: DEBUG oslo_vmware.api [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096973, 'name': PowerOffVM_Task, 'duration_secs': 0.227338} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.548971] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1300.549279] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1300.549594] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e75fbd4-02b9-405e-8f87-4130cf03678f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.627171] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096972, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.629291] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1300.629572] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1300.629909] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleting the datastore file [datastore2] b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1300.630456] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97314dec-2f9c-4b22-8859-e3c307b06410 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.634481] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.220s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.634950] env[69927]: DEBUG nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1300.638199] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.756s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.638495] env[69927]: DEBUG nova.objects.instance [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lazy-loading 'resources' on Instance uuid 693a6c6b-8d1c-405e-bb17-73259e28f556 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1300.646720] env[69927]: DEBUG oslo_vmware.api [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for the task: (returnval){ [ 1300.646720] env[69927]: value = "task-4096975" [ 1300.646720] env[69927]: _type = "Task" [ 1300.646720] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.658976] env[69927]: DEBUG oslo_vmware.api [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096975, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.744890] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096971, 'name': Rename_Task, 'duration_secs': 1.155861} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.745285] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.745560] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78edeab1-39bf-4e82-ace2-9f9a9391f3e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.756732] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1300.756732] env[69927]: value = "task-4096976" [ 1300.756732] env[69927]: _type = "Task" [ 1300.756732] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.768375] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096976, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.124175] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096972, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.240222} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.124481] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1301.124828] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1301.125053] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f070aa3a-e7c8-404b-a4da-b68a916a37fa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.132707] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1301.132707] env[69927]: value = "task-4096977" [ 1301.132707] env[69927]: _type = "Task" [ 1301.132707] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.144391] env[69927]: DEBUG nova.compute.utils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1301.150147] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096977, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.150667] env[69927]: DEBUG nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1301.150849] env[69927]: DEBUG nova.network.neutron [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1301.163030] env[69927]: DEBUG oslo_vmware.api [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Task: {'id': task-4096975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291611} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.163223] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1301.163414] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1301.164183] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1301.164183] env[69927]: INFO nova.compute.manager [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1301.164183] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1301.166855] env[69927]: DEBUG nova.compute.manager [-] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1301.166979] env[69927]: DEBUG nova.network.neutron [-] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1301.196019] env[69927]: DEBUG nova.policy [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d854e5435164764b5b69b9c7262398f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dee421c661394f3fbf8d69a575f095a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1301.268039] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096976, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.293778] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efcff25-f311-4c20-8f8e-95f4aabd07f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.301860] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7949da-5281-4b99-b43e-28980af9e530 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.335411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.335728] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.335857] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "b007a697-7da4-4c97-9ccb-046d86b27568-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.336046] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.336220] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.338338] env[69927]: INFO nova.compute.manager [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Terminating instance [ 1301.342189] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7108feb6-d086-42ee-96ca-d9a2a2519b6b {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.352014] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca954c88-c849-4a73-bf6c-24d866a64f07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.367299] env[69927]: DEBUG nova.compute.provider_tree [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.448019] env[69927]: DEBUG nova.compute.manager [req-aa462947-5764-4731-b1bc-716236e8023d req-683e0917-db76-41a6-baf7-bda40a9fe2f5 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Received event network-vif-deleted-885a1391-dd28-4b0c-ae60-7ae0c571a32a {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1301.449567] env[69927]: INFO nova.compute.manager [req-aa462947-5764-4731-b1bc-716236e8023d req-683e0917-db76-41a6-baf7-bda40a9fe2f5 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Neutron deleted interface 885a1391-dd28-4b0c-ae60-7ae0c571a32a; detaching it from the instance and deleting it from the info cache [ 1301.449567] env[69927]: DEBUG nova.network.neutron [req-aa462947-5764-4731-b1bc-716236e8023d req-683e0917-db76-41a6-baf7-bda40a9fe2f5 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.586416] env[69927]: DEBUG nova.network.neutron [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Successfully created port: 212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.642677] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096977, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102884} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.643031] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1301.644773] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1ec436-7a20-430d-892f-25e89cb06e7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.646362] env[69927]: DEBUG nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1301.675409] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1301.675600] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd73bdcc-eb4f-402d-b622-078eab953917 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.697666] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1301.697666] env[69927]: value = "task-4096978" [ 1301.697666] env[69927]: _type = "Task" [ 1301.697666] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.707525] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096978, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.768480] env[69927]: DEBUG oslo_vmware.api [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4096976, 'name': PowerOnVM_Task, 'duration_secs': 0.515219} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.768758] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.768960] env[69927]: INFO nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Took 8.44 seconds to spawn the instance on the hypervisor. 
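Note: the ExtendVirtualDisk_Task / ReconfigVM_Task / PowerOnVM_Task entries above all follow oslo.vmware's task-polling pattern: a *_Task SOAP call returns a task moref immediately, and wait_for_task() polls it, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" DEBUG lines. A minimal stand-alone sketch of that pattern follows; the vCenter endpoint, credentials and moref value are placeholders and are not taken from this log, and a reachable vCenter is assumed.

```python
# Illustrative sketch of the oslo.vmware task-polling pattern seen in the log above.
# Endpoint, credentials and the moref value are placeholders; not Nova's actual code.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',   # placeholder vCenter and credentials
    api_retry_count=10,                    # retries for transient API failures
    task_poll_interval=0.5)                # seconds between "progress is N%" polls

# Build a managed-object reference for an existing VM (value is a placeholder).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# A *_Task method returns a task moref right away ...
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ... and wait_for_task() blocks, polling until the task reaches 'success'
# (an error from oslo_vmware.exceptions is raised otherwise).
task_info = session.wait_for_task(task_ref)
print(task_info.state, getattr(task_info, 'result', None))
```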
[ 1301.769160] env[69927]: DEBUG nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1301.769944] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78b7869-b090-42e7-8870-5553bc4b323d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.842345] env[69927]: DEBUG nova.compute.manager [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1301.842527] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1301.843422] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a658e534-bc9b-455e-b72f-f9965fb7f7f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.852339] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1301.852587] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-919a21b4-c749-4486-b936-3cdaef457366 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.858597] env[69927]: DEBUG oslo_vmware.api [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1301.858597] env[69927]: value = "task-4096979" [ 1301.858597] env[69927]: _type = "Task" [ 1301.858597] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.869038] env[69927]: DEBUG oslo_vmware.api [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096979, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.869966] env[69927]: DEBUG nova.scheduler.client.report [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1301.921481] env[69927]: DEBUG nova.network.neutron [-] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.952336] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22131d7c-ef42-44af-bd09-8d9223ef9c99 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.962652] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3857015e-ea27-41a6-82f6-20ca60ba8bfb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.992631] env[69927]: DEBUG nova.compute.manager [req-aa462947-5764-4731-b1bc-716236e8023d req-683e0917-db76-41a6-baf7-bda40a9fe2f5 service nova] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Detach interface failed, port_id=885a1391-dd28-4b0c-ae60-7ae0c571a32a, reason: Instance b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1302.210021] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.287561] env[69927]: INFO nova.compute.manager [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Took 13.22 seconds to build instance. [ 1302.375407] env[69927]: DEBUG oslo_vmware.api [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096979, 'name': PowerOffVM_Task, 'duration_secs': 0.207059} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.375407] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.376921] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1302.377235] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1302.378572] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-979a3721-b19e-4b63-8d30-f2f2d951702b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.408190] env[69927]: INFO nova.scheduler.client.report [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Deleted allocations for instance 693a6c6b-8d1c-405e-bb17-73259e28f556 [ 1302.425027] env[69927]: INFO nova.compute.manager [-] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Took 1.26 seconds to deallocate network for instance. 
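Note: the "Acquiring lock ... by ...", "acquired ... waited N s" and '"released" ... held N s' bookkeeping above comes from oslo.concurrency's synchronized decorator; its inner() wrapper is the lockutils.py:405/410/424 frame shown in each entry. A minimal sketch of that pattern, with lock names mirroring the ones in this log (the function bodies are placeholders, not Nova's actual code):

```python
# Illustrative sketch of the oslo.concurrency lock pattern behind the lockutils
# DEBUG lines above (inner() at lockutils.py:405/410/424). Bodies are placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs only while the in-process 'compute_resources' lock is held, which is
    # what serializes the resource-tracker updates interleaved in this log.
    pass


@lockutils.synchronized('b007a697-7da4-4c97-9ccb-046d86b27568')
def do_terminate_instance():
    # Locking on the instance UUID serializes operations on a single instance,
    # as in the terminate_instance path above.
    pass


update_usage()            # emits: Acquiring lock ... / acquired ... / "released" ...
do_terminate_instance()
```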
[ 1302.474787] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1302.474787] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1302.474787] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleting the datastore file [datastore2] b007a697-7da4-4c97-9ccb-046d86b27568 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1302.475088] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fce208cb-5bc7-49f8-b535-1f955b7c527c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.483607] env[69927]: DEBUG oslo_vmware.api [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1302.483607] env[69927]: value = "task-4096981" [ 1302.483607] env[69927]: _type = "Task" [ 1302.483607] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.492719] env[69927]: DEBUG oslo_vmware.api [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.659214] env[69927]: DEBUG nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1302.688601] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1302.689074] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.689074] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1302.689256] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.689424] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1302.689506] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1302.689695] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1302.689860] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1302.690150] env[69927]: DEBUG nova.virt.hardware [None 
req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1302.690385] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1302.690573] env[69927]: DEBUG nova.virt.hardware [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1302.691476] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620cd14c-da45-4e4f-ba2e-5263f22ec42f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.704301] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2e7530-1815-49a9-ac5d-d67015abc310 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.723658] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096978, 'name': ReconfigVM_Task, 'duration_secs': 0.751383} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.723658] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1302.723852] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-775041f2-5e35-4e00-a471-d245d6129645 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.731221] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1302.731221] env[69927]: value = "task-4096982" [ 1302.731221] env[69927]: _type = "Task" [ 1302.731221] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.739920] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096982, 'name': Rename_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.789748] env[69927]: DEBUG oslo_concurrency.lockutils [None req-96b304ee-8117-4198-8226-8d2804340ea2 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.724s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.918211] env[69927]: DEBUG oslo_concurrency.lockutils [None req-bc7c6606-401f-400a-a535-7c975c2f64a5 tempest-AttachVolumeShelveTestJSON-389172843 tempest-AttachVolumeShelveTestJSON-389172843-project-member] Lock "693a6c6b-8d1c-405e-bb17-73259e28f556" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.650s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.932786] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1302.933092] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1302.933461] env[69927]: DEBUG nova.objects.instance [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lazy-loading 'resources' on Instance uuid b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.998301] env[69927]: DEBUG oslo_vmware.api [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353793} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.000800] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1303.001152] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1303.001542] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1303.001542] env[69927]: INFO nova.compute.manager [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1303.001882] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1303.002814] env[69927]: DEBUG nova.compute.manager [-] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1303.002958] env[69927]: DEBUG nova.network.neutron [-] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1303.010661] env[69927]: DEBUG nova.compute.manager [req-5273782b-26cd-4bf0-8337-db25bb7101ae req-5e1c8b77-6e14-483b-bc30-e8452ce89f6c service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Received event network-vif-plugged-212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1303.010881] env[69927]: DEBUG oslo_concurrency.lockutils [req-5273782b-26cd-4bf0-8337-db25bb7101ae req-5e1c8b77-6e14-483b-bc30-e8452ce89f6c service nova] Acquiring lock "9e9e93cc-e225-4ec7-850f-916aa078ba30-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.011136] env[69927]: DEBUG oslo_concurrency.lockutils [req-5273782b-26cd-4bf0-8337-db25bb7101ae req-5e1c8b77-6e14-483b-bc30-e8452ce89f6c service nova] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.011317] env[69927]: DEBUG oslo_concurrency.lockutils [req-5273782b-26cd-4bf0-8337-db25bb7101ae req-5e1c8b77-6e14-483b-bc30-e8452ce89f6c service nova] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.011484] env[69927]: DEBUG nova.compute.manager [req-5273782b-26cd-4bf0-8337-db25bb7101ae req-5e1c8b77-6e14-483b-bc30-e8452ce89f6c service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] No waiting events found dispatching network-vif-plugged-212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1303.011645] env[69927]: WARNING nova.compute.manager [req-5273782b-26cd-4bf0-8337-db25bb7101ae req-5e1c8b77-6e14-483b-bc30-e8452ce89f6c service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Received unexpected event network-vif-plugged-212263f4-2ce1-4c5f-a203-887ea0b690a0 for instance with vm_state building and task_state spawning. [ 1303.139539] env[69927]: DEBUG nova.network.neutron [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Successfully updated port: 212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1303.241957] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096982, 'name': Rename_Task, 'duration_secs': 0.247834} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.242390] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1303.242697] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-912259e5-4c1d-4369-bb95-e8e1890a75b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.249748] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1303.249748] env[69927]: value = "task-4096983" [ 1303.249748] env[69927]: _type = "Task" [ 1303.249748] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.258477] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096983, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.490277] env[69927]: DEBUG nova.compute.manager [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Received event network-changed-cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1303.490640] env[69927]: DEBUG nova.compute.manager [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Refreshing instance network info cache due to event network-changed-cefb857a-0190-4924-a605-749a4858cef2. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1303.491033] env[69927]: DEBUG oslo_concurrency.lockutils [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] Acquiring lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.491116] env[69927]: DEBUG oslo_concurrency.lockutils [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] Acquired lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.491754] env[69927]: DEBUG nova.network.neutron [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Refreshing network info cache for port cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1303.561950] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d75f309-8716-4494-951b-66c87865fad1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.570116] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadd0059-144b-435a-9b08-5ae551491545 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.602522] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6733f924-50e7-4454-88fb-3ee1b762cfc6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.611075] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08adc0b-aca0-4400-9a52-e64cf7b60974 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.625738] env[69927]: DEBUG nova.compute.provider_tree [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.644208] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.644208] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.644208] env[69927]: DEBUG nova.network.neutron [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 
9e9e93cc-e225-4ec7-850f-916aa078ba30] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.761973] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096983, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.952565] env[69927]: DEBUG nova.network.neutron [-] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.128447] env[69927]: DEBUG nova.scheduler.client.report [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1304.178202] env[69927]: DEBUG nova.network.neutron [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1304.224625] env[69927]: DEBUG nova.network.neutron [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updated VIF entry in instance network info cache for port cefb857a-0190-4924-a605-749a4858cef2. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1304.224992] env[69927]: DEBUG nova.network.neutron [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updating instance_info_cache with network_info: [{"id": "cefb857a-0190-4924-a605-749a4858cef2", "address": "fa:16:3e:8f:db:3a", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcefb857a-01", "ovs_interfaceid": "cefb857a-0190-4924-a605-749a4858cef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.260829] env[69927]: DEBUG oslo_vmware.api [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096983, 'name': PowerOnVM_Task, 'duration_secs': 0.754472} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.261318] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1304.261544] env[69927]: INFO nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Took 8.70 seconds to spawn the instance on the hypervisor. [ 1304.261749] env[69927]: DEBUG nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1304.262570] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1232cae-9dde-4788-b1c8-a79788428b5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.455573] env[69927]: INFO nova.compute.manager [-] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Took 1.45 seconds to deallocate network for instance. 
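Note: the network_info blob recorded in the cache update above is, once serialized, just a list of VIF dictionaries. A short stand-alone sketch of walking that structure to recover the fixed and floating addresses; the data below is copied from the cache entry above and trimmed to the relevant keys.

```python
# Walk a serialized network_info structure (shape taken from the log entry above,
# trimmed) and print each port's fixed address plus any floating IPs attached to it.
network_info = [{
    "id": "cefb857a-0190-4924-a605-749a4858cef2",
    "address": "fa:16:3e:8f:db:3a",
    "network": {
        "id": "1b67ec75-5b1a-4408-976d-0bd585378451",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.4",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.197", "type": "floating"}],
            }],
        }],
    },
    "type": "ovs",
    "devname": "tapcefb857a-01",
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"], ip["address"], floats)
# -> cefb857a-0190-4924-a605-749a4858cef2 192.168.128.4 ['10.180.180.197']
```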
[ 1304.623807] env[69927]: DEBUG nova.network.neutron [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Updating instance_info_cache with network_info: [{"id": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "address": "fa:16:3e:f3:03:aa", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap212263f4-2c", "ovs_interfaceid": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.634192] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.661286] env[69927]: INFO nova.scheduler.client.report [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Deleted allocations for instance b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a [ 1304.728099] env[69927]: DEBUG oslo_concurrency.lockutils [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] Releasing lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.728387] env[69927]: DEBUG nova.compute.manager [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Received event network-vif-deleted-037faf17-cf20-417c-ab4d-b0a08944b7d9 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1304.728569] env[69927]: INFO nova.compute.manager [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Neutron deleted interface 037faf17-cf20-417c-ab4d-b0a08944b7d9; detaching it from the instance and deleting it from the info cache [ 1304.728750] env[69927]: DEBUG nova.network.neutron [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Updating instance_info_cache with network_info: [] {{(pid=69927) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.781266] env[69927]: INFO nova.compute.manager [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Took 14.91 seconds to build instance. [ 1304.963179] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.963526] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.963791] env[69927]: DEBUG nova.objects.instance [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lazy-loading 'resources' on Instance uuid b007a697-7da4-4c97-9ccb-046d86b27568 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1305.051064] env[69927]: DEBUG nova.compute.manager [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Received event network-changed-212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1305.051205] env[69927]: DEBUG nova.compute.manager [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Refreshing instance network info cache due to event network-changed-212263f4-2ce1-4c5f-a203-887ea0b690a0. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1305.051471] env[69927]: DEBUG oslo_concurrency.lockutils [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] Acquiring lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.126828] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.127186] env[69927]: DEBUG nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Instance network_info: |[{"id": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "address": "fa:16:3e:f3:03:aa", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap212263f4-2c", "ovs_interfaceid": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1305.127490] env[69927]: DEBUG oslo_concurrency.lockutils [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] Acquired lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.127702] env[69927]: DEBUG nova.network.neutron [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Refreshing network info cache for port 212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1305.129029] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:03:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57691231-2b8d-4d71-8f79-d4a6a1d95ec8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'212263f4-2ce1-4c5f-a203-887ea0b690a0', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1305.136846] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1305.137999] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1305.138258] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-322fe835-2acb-4451-910d-9f97b7b19415 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.161537] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1305.161537] env[69927]: value = "task-4096985" [ 1305.161537] env[69927]: _type = "Task" [ 1305.161537] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.168371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-e3c06154-7fe3-45e5-8760-f7ded9bb8043 tempest-ServersTestJSON-738065696 tempest-ServersTestJSON-738065696-project-member] Lock "b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.658s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.173013] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096985, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.232486] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a449b9ab-e0fc-4081-8197-cb8a560f0120 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.243434] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240fb90a-64c4-4225-8612-4f51c000bbdf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.274848] env[69927]: DEBUG nova.compute.manager [req-dae85b67-8f08-49cb-b89d-af52074c03c9 req-36fb484c-5ad5-4d74-9760-be97eb405aeb service nova] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Detach interface failed, port_id=037faf17-cf20-417c-ab4d-b0a08944b7d9, reason: Instance b007a697-7da4-4c97-9ccb-046d86b27568 could not be found. 
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1305.284849] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c6ab4346-da19-4625-bb22-df2e235ddcc9 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.423s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.588950] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fcf931-5326-4234-919c-40641aa89265 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.597281] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3a0f34-c086-4c87-99ea-527879463fec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.631128] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3afd5134-b3aa-402d-8d0e-ce6b896588a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.641818] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15cb6df-3af0-4519-855f-3a584eb385d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.659064] env[69927]: DEBUG nova.compute.provider_tree [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.673816] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096985, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.890572] env[69927]: DEBUG nova.network.neutron [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Updated VIF entry in instance network info cache for port 212263f4-2ce1-4c5f-a203-887ea0b690a0. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1305.890977] env[69927]: DEBUG nova.network.neutron [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Updating instance_info_cache with network_info: [{"id": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "address": "fa:16:3e:f3:03:aa", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap212263f4-2c", "ovs_interfaceid": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.166467] env[69927]: DEBUG nova.scheduler.client.report [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1306.179447] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096985, 'name': CreateVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.394109] env[69927]: DEBUG oslo_concurrency.lockutils [req-090e1924-e1be-49f2-bd2f-23ee4f3e9048 req-fbd7177c-eedf-4c60-bef1-3ab849dd0566 service nova] Releasing lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.673955] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.710s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.683119] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4096985, 'name': CreateVM_Task, 'duration_secs': 1.388235} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.683306] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.684051] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.684214] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.684544] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1306.684946] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ff27e61-648a-41c8-ad7b-d39d28744d1c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.692358] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1306.692358] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526f2068-2880-dc6c-a682-c0f946f95e1f" [ 1306.692358] env[69927]: _type = "Task" [ 1306.692358] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.702268] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526f2068-2880-dc6c-a682-c0f946f95e1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.710900] env[69927]: INFO nova.scheduler.client.report [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleted allocations for instance b007a697-7da4-4c97-9ccb-046d86b27568 [ 1307.087182] env[69927]: DEBUG nova.compute.manager [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1307.087481] env[69927]: DEBUG nova.compute.manager [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing instance network info cache due to event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1307.091239] env[69927]: DEBUG oslo_concurrency.lockutils [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] Acquiring lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.091239] env[69927]: DEBUG oslo_concurrency.lockutils [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] Acquired lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.091362] env[69927]: DEBUG nova.network.neutron [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1307.210050] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526f2068-2880-dc6c-a682-c0f946f95e1f, 'name': SearchDatastore_Task, 'duration_secs': 0.012096} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.210742] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.210941] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.213967] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.213967] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.213967] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.215222] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa736cd5-9411-46ac-82ed-1f60c8ae305f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.224225] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d54f815b-c114-499a-b8f3-4cda02c8007e tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "b007a697-7da4-4c97-9ccb-046d86b27568" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.888s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.232958] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.235486] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1307.236246] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6b3f699-96be-4082-8ea9-f51f0792320f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.248403] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1307.248403] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5270617e-e63b-542c-061c-c406317913d3" [ 1307.248403] env[69927]: _type = "Task" [ 1307.248403] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.258601] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5270617e-e63b-542c-061c-c406317913d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.764528] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5270617e-e63b-542c-061c-c406317913d3, 'name': SearchDatastore_Task, 'duration_secs': 0.018766} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.766064] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a0819db-2ada-427c-9da5-61763bce5923 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.779017] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1307.779017] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526a717a-f26b-6f3e-53fd-1a964588fe3a" [ 1307.779017] env[69927]: _type = "Task" [ 1307.779017] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.792877] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526a717a-f26b-6f3e-53fd-1a964588fe3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.931617] env[69927]: DEBUG nova.network.neutron [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updated VIF entry in instance network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.932435] env[69927]: DEBUG nova.network.neutron [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.290527] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526a717a-f26b-6f3e-53fd-1a964588fe3a, 'name': SearchDatastore_Task, 'duration_secs': 0.011944} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.290788] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.291377] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1308.291573] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a4a8a3b-3143-47b9-bddd-884f5c296bd8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.299025] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1308.299025] env[69927]: value = "task-4096987" [ 1308.299025] env[69927]: _type = "Task" [ 1308.299025] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.308218] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096987, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.436170] env[69927]: DEBUG oslo_concurrency.lockutils [req-13ac3939-daae-474c-ac05-dc53c31fa174 req-06596e09-ffa4-4462-ab0e-37803a375da3 service nova] Releasing lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.809028] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096987, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.310988] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530131} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.311430] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1309.311530] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1309.311732] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8460e636-c2eb-424d-bc7a-d0cf91798981 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.319641] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1309.319641] env[69927]: value = "task-4096988" [ 1309.319641] env[69927]: _type = "Task" [ 1309.319641] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.327933] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096988, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.830132] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067286} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.830132] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1309.830499] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5beda30-8576-4983-9889-18612e138b6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.853145] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1309.853482] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c802cea-228a-4aba-81b0-684ff34f4772 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.875556] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1309.875556] env[69927]: value = "task-4096989" [ 1309.875556] env[69927]: _type = "Task" [ 1309.875556] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.884346] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096989, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.386358] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096989, 'name': ReconfigVM_Task, 'duration_secs': 0.285498} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.386736] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1310.387318] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbaa7c38-51b0-4d2e-9b30-3604a25f1f88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.394049] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1310.394049] env[69927]: value = "task-4096990" [ 1310.394049] env[69927]: _type = "Task" [ 1310.394049] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.403817] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096990, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.904725] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096990, 'name': Rename_Task, 'duration_secs': 0.136671} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.905141] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1310.905381] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da8c2989-fb5c-4e2f-a80e-e50397e705c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.912869] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1310.912869] env[69927]: value = "task-4096991" [ 1310.912869] env[69927]: _type = "Task" [ 1310.912869] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.921681] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096991, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.423733] env[69927]: DEBUG oslo_vmware.api [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4096991, 'name': PowerOnVM_Task, 'duration_secs': 0.49827} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.424118] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1311.424330] env[69927]: INFO nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1311.424606] env[69927]: DEBUG nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1311.425482] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadff541-fb79-45f1-b512-b26942eb993e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.944098] env[69927]: INFO nova.compute.manager [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Took 13.54 seconds to build instance. 
[ 1311.950536] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "cff307ed-3c8b-4126-9749-1204597cbf6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.950837] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.951109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "cff307ed-3c8b-4126-9749-1204597cbf6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.951346] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.951562] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.953847] env[69927]: INFO nova.compute.manager [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Terminating instance [ 1312.446378] env[69927]: DEBUG oslo_concurrency.lockutils [None req-21cc4896-13d2-40d6-a93b-ff5b2195193b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.054s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.458260] env[69927]: DEBUG nova.compute.manager [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1312.458810] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1312.459872] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894f7a6a-9db1-4d58-a38d-79ecae564c94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.468048] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1312.468505] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e197ed5-d70c-42ef-b770-54514037f9ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.475857] env[69927]: DEBUG oslo_vmware.api [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1312.475857] env[69927]: value = "task-4096992" [ 1312.475857] env[69927]: _type = "Task" [ 1312.475857] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.485141] env[69927]: DEBUG oslo_vmware.api [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.986073] env[69927]: DEBUG oslo_vmware.api [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096992, 'name': PowerOffVM_Task, 'duration_secs': 0.396256} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.986350] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1312.986573] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1312.986833] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3db69acc-6ceb-401c-8984-ef6987acf23e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.049326] env[69927]: DEBUG nova.compute.manager [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Received event network-changed-212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1313.049534] env[69927]: DEBUG nova.compute.manager [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Refreshing instance network info cache due to event network-changed-212263f4-2ce1-4c5f-a203-887ea0b690a0. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1313.049765] env[69927]: DEBUG oslo_concurrency.lockutils [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] Acquiring lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.049930] env[69927]: DEBUG oslo_concurrency.lockutils [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] Acquired lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.050209] env[69927]: DEBUG nova.network.neutron [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Refreshing network info cache for port 212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.052918] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1313.053673] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Deleting contents of the VM from datastore 
datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1313.053942] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleting the datastore file [datastore1] cff307ed-3c8b-4126-9749-1204597cbf6c {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1313.054509] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae7c103b-8de9-4415-a433-4c670cd5f66a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.062816] env[69927]: DEBUG oslo_vmware.api [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1313.062816] env[69927]: value = "task-4096994" [ 1313.062816] env[69927]: _type = "Task" [ 1313.062816] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.072262] env[69927]: DEBUG oslo_vmware.api [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096994, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.574553] env[69927]: DEBUG oslo_vmware.api [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16588} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.574902] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1313.575008] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1313.575198] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1313.575377] env[69927]: INFO nova.compute.manager [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1313.575613] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1313.575797] env[69927]: DEBUG nova.compute.manager [-] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1313.575888] env[69927]: DEBUG nova.network.neutron [-] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1313.758722] env[69927]: DEBUG nova.network.neutron [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Updated VIF entry in instance network info cache for port 212263f4-2ce1-4c5f-a203-887ea0b690a0. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.759122] env[69927]: DEBUG nova.network.neutron [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Updating instance_info_cache with network_info: [{"id": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "address": "fa:16:3e:f3:03:aa", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap212263f4-2c", "ovs_interfaceid": "212263f4-2ce1-4c5f-a203-887ea0b690a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.858066] env[69927]: DEBUG nova.compute.manager [req-f763d1ad-3368-45ec-8866-1d3e0a95c228 req-bac5e20e-537a-45c7-80fb-1c8647ec6dc7 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Received event network-vif-deleted-619d4a4d-a555-4b6b-a00b-87d1eb721427 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1313.858066] env[69927]: INFO nova.compute.manager [req-f763d1ad-3368-45ec-8866-1d3e0a95c228 req-bac5e20e-537a-45c7-80fb-1c8647ec6dc7 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Neutron deleted interface 619d4a4d-a555-4b6b-a00b-87d1eb721427; detaching it from the instance 
and deleting it from the info cache [ 1313.858353] env[69927]: DEBUG nova.network.neutron [req-f763d1ad-3368-45ec-8866-1d3e0a95c228 req-bac5e20e-537a-45c7-80fb-1c8647ec6dc7 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.261727] env[69927]: DEBUG oslo_concurrency.lockutils [req-d07374cc-439f-464a-9748-fc982585c8cb req-833e2d1d-40ff-4ad9-aadb-44ca862f5351 service nova] Releasing lock "refresh_cache-9e9e93cc-e225-4ec7-850f-916aa078ba30" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.338557] env[69927]: DEBUG nova.network.neutron [-] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.361110] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0d9ec94-ad39-409b-8f03-05567910f307 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.372619] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e07b857-e0f1-4df9-892c-0610d387bfc0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.401655] env[69927]: DEBUG nova.compute.manager [req-f763d1ad-3368-45ec-8866-1d3e0a95c228 req-bac5e20e-537a-45c7-80fb-1c8647ec6dc7 service nova] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Detach interface failed, port_id=619d4a4d-a555-4b6b-a00b-87d1eb721427, reason: Instance cff307ed-3c8b-4126-9749-1204597cbf6c could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1314.841637] env[69927]: INFO nova.compute.manager [-] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Took 1.27 seconds to deallocate network for instance. 
[ 1315.349028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.349028] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.349280] env[69927]: DEBUG nova.objects.instance [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lazy-loading 'resources' on Instance uuid cff307ed-3c8b-4126-9749-1204597cbf6c {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1315.940430] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf706313-c38d-4e56-b221-9ce2b56e59d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.948534] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22622b3a-0d79-4d4d-bb54-7c25bb93b48d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.982036] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ffad0d-9eab-42fa-9d27-a47422f84a78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.988611] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159f5931-2a5e-45c4-a1a6-1467959c1796 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.002271] env[69927]: DEBUG nova.compute.provider_tree [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.506696] env[69927]: DEBUG nova.scheduler.client.report [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1317.012379] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.663s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.034896] env[69927]: INFO nova.scheduler.client.report [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleted allocations for instance cff307ed-3c8b-4126-9749-1204597cbf6c [ 1317.544625] env[69927]: DEBUG oslo_concurrency.lockutils [None req-85ca48ed-acc1-4fc8-a2c0-0b27f46bf088 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "cff307ed-3c8b-4126-9749-1204597cbf6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.594s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.635651] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.635936] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.636569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.636569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.636569] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.638911] env[69927]: INFO 
nova.compute.manager [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Terminating instance [ 1319.142204] env[69927]: DEBUG nova.compute.manager [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1319.142438] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1319.143323] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2d5d43-32b9-4fa8-a1cc-d523ee1c993b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.151917] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1319.152095] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81676009-662c-46a9-a221-dce298e88541 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.161144] env[69927]: DEBUG oslo_vmware.api [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1319.161144] env[69927]: value = "task-4096995" [ 1319.161144] env[69927]: _type = "Task" [ 1319.161144] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.172536] env[69927]: DEBUG oslo_vmware.api [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.507979] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1319.670775] env[69927]: DEBUG oslo_vmware.api [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096995, 'name': PowerOffVM_Task, 'duration_secs': 0.192187} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.671150] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1319.672117] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1319.672117] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c265417-baf2-47b8-8b45-7532ae56de52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.739653] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1319.739880] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1319.740113] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleting the datastore file [datastore1] 15c44d86-829f-4317-ab66-9e61d4fb4dd0 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1319.740393] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd28a7a9-adeb-4576-b7e4-73d52ee7c980 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.748180] env[69927]: DEBUG oslo_vmware.api [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for the task: (returnval){ [ 1319.748180] env[69927]: value = "task-4096997" [ 1319.748180] env[69927]: _type = "Task" [ 1319.748180] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.755850] env[69927]: DEBUG oslo_vmware.api [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096997, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.258326] env[69927]: DEBUG oslo_vmware.api [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Task: {'id': task-4096997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343578} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.258667] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1320.258802] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1320.258984] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1320.259177] env[69927]: INFO nova.compute.manager [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1320.259423] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1320.259622] env[69927]: DEBUG nova.compute.manager [-] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1320.259717] env[69927]: DEBUG nova.network.neutron [-] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1320.516399] env[69927]: DEBUG nova.compute.manager [req-18c76b7e-8233-4baa-a0a5-69d89c29ee07 req-5127fc4c-6431-4695-b7a7-01f7c386323d service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Received event network-vif-deleted-30a26167-3dd4-4729-be64-03c251eaaa48 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1320.516591] env[69927]: INFO nova.compute.manager [req-18c76b7e-8233-4baa-a0a5-69d89c29ee07 req-5127fc4c-6431-4695-b7a7-01f7c386323d service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Neutron deleted interface 30a26167-3dd4-4729-be64-03c251eaaa48; detaching it from the instance and deleting it from the info cache [ 1320.516814] env[69927]: DEBUG nova.network.neutron [req-18c76b7e-8233-4baa-a0a5-69d89c29ee07 req-5127fc4c-6431-4695-b7a7-01f7c386323d service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.001849] env[69927]: DEBUG nova.network.neutron [-] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.020183] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b9e9245-7c66-4041-b0e4-581d9ff741fb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.030559] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a80470-2513-4886-92bc-67cf4cb02af2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.058196] env[69927]: DEBUG nova.compute.manager [req-18c76b7e-8233-4baa-a0a5-69d89c29ee07 req-5127fc4c-6431-4695-b7a7-01f7c386323d service nova] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Detach interface failed, port_id=30a26167-3dd4-4729-be64-03c251eaaa48, reason: Instance 15c44d86-829f-4317-ab66-9e61d4fb4dd0 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1321.504430] env[69927]: INFO nova.compute.manager [-] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Took 1.24 seconds to deallocate network for instance. 
[ 1322.010555] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.010932] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.011129] env[69927]: DEBUG nova.objects.instance [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lazy-loading 'resources' on Instance uuid 15c44d86-829f-4317-ab66-9e61d4fb4dd0 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1322.594339] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cd3073-fdd7-4b27-b802-175d6cf39bb3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.603058] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054f8da6-0ca2-4834-bf1b-45aee907267f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.637176] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b0985c-13ae-4e99-b71d-21db4dfc6a5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.645609] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fa572f-9d04-4cd3-a72c-7ca4cf043f09 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.659123] env[69927]: DEBUG nova.compute.provider_tree [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.163109] env[69927]: DEBUG nova.scheduler.client.report [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1323.667710] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.685572] env[69927]: INFO nova.scheduler.client.report [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Deleted allocations for instance 15c44d86-829f-4317-ab66-9e61d4fb4dd0 [ 1324.193175] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c41a1434-adfa-4a1a-8a32-5a00fd1c9968 tempest-ServerRescueNegativeTestJSON-1943285071 tempest-ServerRescueNegativeTestJSON-1943285071-project-member] Lock "15c44d86-829f-4317-ab66-9e61d4fb4dd0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.557s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.503035] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.507603] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.507921] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1326.507608] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1326.507864] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.013739] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.013948] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.014190] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache"
:: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.014345] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1327.015244] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7addb21d-4c12-4de4-a1ff-ded7764f8004 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.024734] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f928d816-e7f1-44c6-b54b-9157d57f982f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.040965] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1fea51-5a32-4dd7-8096-fcd488f204b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.048342] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7733a53-41d0-43f9-a420-37b7604b6d82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.077296] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178912MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1327.077462] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.077679] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.104414] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance da468d11-82a4-4fec-b06a-1b522bacdbc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1328.104779] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1328.104779] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1328.104868] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9e9e93cc-e225-4ec7-850f-916aa078ba30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1328.104984] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1328.105137] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1328.166624] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9929a5a-a603-474b-bb0d-2ef493b4083d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.174336] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a727bd45-3d25-463e-a3af-f18051ee806f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.205241] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0f4f6c-5489-4fb1-af0f-7e1043815d1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.213157] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4b0fc1-259d-4102-82db-d12b42135a91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.227436] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1328.731221] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1329.236840] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1329.237238] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.159s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.236957] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.742830] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.743025] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.743114] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1340.747759] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.748161] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.748354] env[69927]: INFO nova.compute.manager [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Shelving [ 1341.367778] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.368025] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.758660] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1341.759087] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8987dd4f-0846-4706-8e3d-9343cc1ba6ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.767443] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1341.767443] env[69927]: value = "task-4096998" [ 1341.767443] env[69927]: _type = "Task" [ 1341.767443] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.778016] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096998, 'name': PowerOffVM_Task} progress is 0%.
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.870955] env[69927]: DEBUG nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1342.278543] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096998, 'name': PowerOffVM_Task, 'duration_secs': 0.189668} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.278824] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1342.279635] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd475ff9-8321-4350-ba45-d9717d672b70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.298114] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a07af41-7d4f-4b1b-837f-3b357e39d87a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.394095] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.394371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.395873] env[69927]: INFO nova.compute.claims [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.809476] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Creating Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1342.809920] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4e65b430-61c6-40e0-998b-d42f587d9c8e {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.818336] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1342.818336] env[69927]: value = "task-4096999" [ 1342.818336] env[69927]: _type = "Task" [ 1342.818336] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.827781] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096999, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.328977] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4096999, 'name': CreateSnapshot_Task, 'duration_secs': 0.414207} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.329295] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Created Snapshot of the VM instance {{(pid=69927) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1343.330094] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6fa850-942a-413f-97e1-0743ed010970 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.478743] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66252569-7bb4-4aee-91ce-aa711691231b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.486197] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d831a36-1dad-40da-b4fb-5862c1642e86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.518694] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0154b3-2ac6-4b10-aac5-929adf2726d1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.527315] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175a0b07-5c96-430e-b649-a597634e25c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.541377] env[69927]: DEBUG nova.compute.provider_tree [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1343.850052] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 
tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Creating linked-clone VM from snapshot {{(pid=69927) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1343.850512] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1032cfd8-37ad-4186-9afe-07ab3f1baa72 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.859882] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1343.859882] env[69927]: value = "task-4097000" [ 1343.859882] env[69927]: _type = "Task" [ 1343.859882] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.869518] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097000, 'name': CloneVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.045026] env[69927]: DEBUG nova.scheduler.client.report [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1344.370905] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097000, 'name': CloneVM_Task} progress is 94%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.550158] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.156s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.550814] env[69927]: DEBUG nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Start building networks asynchronously for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1344.875521] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097000, 'name': CloneVM_Task} progress is 95%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.056311] env[69927]: DEBUG nova.compute.utils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1345.057828] env[69927]: DEBUG nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1345.058026] env[69927]: DEBUG nova.network.neutron [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1345.103979] env[69927]: DEBUG nova.policy [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef08164611894c289d4c30194d91526a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823381b9f644adf818b490c551f5a3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1345.344555] env[69927]: DEBUG nova.network.neutron [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Successfully created port: 4d10cd3a-98db-44a5-a193-057f208e8f72 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1345.371794] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097000, 'name': CloneVM_Task, 'duration_secs': 1.119405} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.372075] env[69927]: INFO nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Created linked-clone VM from snapshot [ 1345.372943] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e134e50f-24dc-4fb5-a6ae-f8a6c29f6a5a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.380475] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Uploading image 10dd49fa-1d39-4594-8a0f-2e3fe886eb62 {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1345.405519] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1345.405519] env[69927]: value = "vm-811611" [ 1345.405519] env[69927]: _type = "VirtualMachine" [ 1345.405519] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1345.405883] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d498a4fd-5593-4228-b6a1-23f1b33b959f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.413328] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease: (returnval){ [ 1345.413328] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52543ec2-c721-ecb7-c59a-51d032e96c2f" [ 1345.413328] env[69927]: _type = "HttpNfcLease" [ 1345.413328] env[69927]: } obtained for exporting VM: (result){ [ 1345.413328] env[69927]: value = "vm-811611" [ 1345.413328] env[69927]: _type = "VirtualMachine" [ 1345.413328] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1345.413809] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the lease: (returnval){ [ 1345.413809] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52543ec2-c721-ecb7-c59a-51d032e96c2f" [ 1345.413809] env[69927]: _type = "HttpNfcLease" [ 1345.413809] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1345.420220] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1345.420220] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52543ec2-c721-ecb7-c59a-51d032e96c2f" [ 1345.420220] env[69927]: _type = "HttpNfcLease" [ 1345.420220] env[69927]: } is initializing. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1345.561779] env[69927]: DEBUG nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1345.922267] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1345.922267] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52543ec2-c721-ecb7-c59a-51d032e96c2f" [ 1345.922267] env[69927]: _type = "HttpNfcLease" [ 1345.922267] env[69927]: } is ready. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1345.922704] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1345.922704] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52543ec2-c721-ecb7-c59a-51d032e96c2f" [ 1345.922704] env[69927]: _type = "HttpNfcLease" [ 1345.922704] env[69927]: }. {{(pid=69927) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1345.923331] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff58ebb-9048-43ab-8191-0298e5db9974 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.932337] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52517b49-cccc-3fbd-12cb-337c0287ad8a/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1345.932540] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52517b49-cccc-3fbd-12cb-337c0287ad8a/disk-0.vmdk for reading. {{(pid=69927) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1346.017977] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8d412d65-6f22-4668-8a88-b15cf83f46af {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.571202] env[69927]: DEBUG nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1346.598462] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1346.599012] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1346.599609] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1346.599707] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1346.599940] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1346.600142] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1346.600433] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1346.600624] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1346.600891] env[69927]: DEBUG 
nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1346.601154] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1346.601358] env[69927]: DEBUG nova.virt.hardware [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1346.602278] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc770fa-8a2f-4c5d-82d2-ea939c8f2185 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.611256] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070be499-409e-4f07-8a94-2d538fa16067 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.722781] env[69927]: DEBUG nova.compute.manager [req-69b605c6-85cb-4b93-a861-7b4d8878e9cc req-b16effb6-c572-44ee-88cd-1f806be89550 service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Received event network-vif-plugged-4d10cd3a-98db-44a5-a193-057f208e8f72 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1346.722990] env[69927]: DEBUG oslo_concurrency.lockutils [req-69b605c6-85cb-4b93-a861-7b4d8878e9cc req-b16effb6-c572-44ee-88cd-1f806be89550 service nova] Acquiring lock "95c47fc8-fed0-4b55-8f65-61b46861e51d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.723228] env[69927]: DEBUG oslo_concurrency.lockutils [req-69b605c6-85cb-4b93-a861-7b4d8878e9cc req-b16effb6-c572-44ee-88cd-1f806be89550 service nova] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.723496] env[69927]: DEBUG oslo_concurrency.lockutils [req-69b605c6-85cb-4b93-a861-7b4d8878e9cc req-b16effb6-c572-44ee-88cd-1f806be89550 service nova] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.723689] env[69927]: DEBUG nova.compute.manager [req-69b605c6-85cb-4b93-a861-7b4d8878e9cc req-b16effb6-c572-44ee-88cd-1f806be89550 service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] No waiting events found dispatching network-vif-plugged-4d10cd3a-98db-44a5-a193-057f208e8f72 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1346.723873] env[69927]: WARNING nova.compute.manager
[req-69b605c6-85cb-4b93-a861-7b4d8878e9cc req-b16effb6-c572-44ee-88cd-1f806be89550 service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Received unexpected event network-vif-plugged-4d10cd3a-98db-44a5-a193-057f208e8f72 for instance with vm_state building and task_state spawning. [ 1346.818645] env[69927]: DEBUG nova.network.neutron [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Successfully updated port: 4d10cd3a-98db-44a5-a193-057f208e8f72 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1347.320511] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-95c47fc8-fed0-4b55-8f65-61b46861e51d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.320944] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-95c47fc8-fed0-4b55-8f65-61b46861e51d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1347.320944] env[69927]: DEBUG nova.network.neutron [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.855721] env[69927]: DEBUG nova.network.neutron [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1347.990455] env[69927]: DEBUG nova.network.neutron [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Updating instance_info_cache with network_info: [{"id": "4d10cd3a-98db-44a5-a193-057f208e8f72", "address": "fa:16:3e:5b:5d:e6", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d10cd3a-98", "ovs_interfaceid": "4d10cd3a-98db-44a5-a193-057f208e8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.493741] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-95c47fc8-fed0-4b55-8f65-61b46861e51d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1348.494176] env[69927]: DEBUG nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Instance network_info: |[{"id": "4d10cd3a-98db-44a5-a193-057f208e8f72", "address": "fa:16:3e:5b:5d:e6", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d10cd3a-98", "ovs_interfaceid": "4d10cd3a-98db-44a5-a193-057f208e8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1348.494573] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:5d:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d10cd3a-98db-44a5-a193-057f208e8f72', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1348.502320] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1348.502578] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1348.503020] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecda0d06-f676-4b8f-9a41-92e1109b9d09 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.524763] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1348.524763] env[69927]: value = "task-4097002" [ 1348.524763] env[69927]: _type = "Task" [ 1348.524763] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.533449] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097002, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.750902] env[69927]: DEBUG nova.compute.manager [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Received event network-changed-4d10cd3a-98db-44a5-a193-057f208e8f72 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1348.751114] env[69927]: DEBUG nova.compute.manager [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Refreshing instance network info cache due to event network-changed-4d10cd3a-98db-44a5-a193-057f208e8f72. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1348.752022] env[69927]: DEBUG oslo_concurrency.lockutils [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] Acquiring lock "refresh_cache-95c47fc8-fed0-4b55-8f65-61b46861e51d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.752022] env[69927]: DEBUG oslo_concurrency.lockutils [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] Acquired lock "refresh_cache-95c47fc8-fed0-4b55-8f65-61b46861e51d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.752022] env[69927]: DEBUG nova.network.neutron [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Refreshing network info cache for port 4d10cd3a-98db-44a5-a193-057f208e8f72 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1349.035121] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097002, 'name': CreateVM_Task, 'duration_secs': 0.399433} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.035331] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1349.036019] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.036207] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1349.036562] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1349.036823] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a930b42-9449-4492-9cc7-7ddc4127b97e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.042530] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1349.042530] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7eb3c-627f-91aa-5706-e97bc2db5cd5" [ 1349.042530] env[69927]: _type = "Task" [ 1349.042530] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.051019] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7eb3c-627f-91aa-5706-e97bc2db5cd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.462980] env[69927]: DEBUG nova.network.neutron [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Updated VIF entry in instance network info cache for port 4d10cd3a-98db-44a5-a193-057f208e8f72. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1349.463449] env[69927]: DEBUG nova.network.neutron [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Updating instance_info_cache with network_info: [{"id": "4d10cd3a-98db-44a5-a193-057f208e8f72", "address": "fa:16:3e:5b:5d:e6", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d10cd3a-98", "ovs_interfaceid": "4d10cd3a-98db-44a5-a193-057f208e8f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.554257] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f7eb3c-627f-91aa-5706-e97bc2db5cd5, 'name': SearchDatastore_Task, 'duration_secs': 0.013124} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.554764] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.554857] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1349.555054] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.555196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1349.555350] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1349.555628] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ec2ea96-17e6-45ef-9c80-69b4e32c1c03 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.573248] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1349.573504] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1349.574574] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4e12c4-f67e-4159-b644-4bad795f7cc7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.582042] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1349.582042] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527fe362-c99f-4249-e94e-71cfe3fd0118" [ 1349.582042] env[69927]: _type = "Task" [ 1349.582042] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.592141] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527fe362-c99f-4249-e94e-71cfe3fd0118, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.967645] env[69927]: DEBUG oslo_concurrency.lockutils [req-1fe6e6cb-52d8-4c98-bee4-a6c386a1bdac req-0401bd8c-f18f-497d-8366-a25136bf0cbe service nova] Releasing lock "refresh_cache-95c47fc8-fed0-4b55-8f65-61b46861e51d" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.092934] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]527fe362-c99f-4249-e94e-71cfe3fd0118, 'name': SearchDatastore_Task, 'duration_secs': 0.030252} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.093813] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1935d073-2a5c-405a-97b9-03fdaa4dc1cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.099574] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1350.099574] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d5947a-d726-de14-7aa2-54935de0f5df" [ 1350.099574] env[69927]: _type = "Task" [ 1350.099574] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.107857] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d5947a-d726-de14-7aa2-54935de0f5df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.610744] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52d5947a-d726-de14-7aa2-54935de0f5df, 'name': SearchDatastore_Task, 'duration_secs': 0.014442} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.611175] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.611373] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1350.611631] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4b85ce7-ccbd-4e88-bc2a-a0a11fb52c71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.619298] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1350.619298] env[69927]: value = "task-4097003" [ 1350.619298] env[69927]: _type = "Task" [ 1350.619298] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.627544] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097003, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.883555] env[69927]: INFO nova.compute.manager [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Rebuilding instance [ 1350.933471] env[69927]: DEBUG nova.compute.manager [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1350.934410] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aff217e-5bdb-4c30-9fa7-ac17e197624f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.131791] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097003, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.630666] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097003, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572553} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.631075] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1351.631180] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1351.631460] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03794432-91b8-4312-a645-3a45fa6908ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.638162] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1351.638162] env[69927]: value = "task-4097004" [ 1351.638162] env[69927]: _type = "Task" [ 1351.638162] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.648446] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097004, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.949496] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1351.949769] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36cb7a5c-c5f9-4f00-9d59-ae5f3da85fc4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.957364] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1351.957364] env[69927]: value = "task-4097005" [ 1351.957364] env[69927]: _type = "Task" [ 1351.957364] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.966248] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.149223] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071204} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.149518] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.150341] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d5b11c-d6b5-4640-97f4-011175bea3fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.173984] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.174326] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd5f0765-d1da-47a5-b719-bcbcce9889e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.194139] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1352.194139] env[69927]: value = "task-4097006" [ 1352.194139] env[69927]: _type = "Task" [ 1352.194139] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.203349] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097006, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.468625] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097005, 'name': PowerOffVM_Task, 'duration_secs': 0.201381} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.468756] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1352.469097] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1352.469947] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33a0572-af13-41ba-87f0-c3e9d14751e4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.478029] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1352.478172] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec8469e2-553d-4eb2-9fe3-c79e7013a9c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.551648] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1352.551969] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1352.552376] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleting the datastore file [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1352.552551] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8cd8c4d-682e-4bb0-b117-e355f56450ac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.560093] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1352.560093] env[69927]: value = "task-4097008" [ 1352.560093] env[69927]: _type = "Task" [ 1352.560093] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.569109] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097008, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.706018] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097006, 'name': ReconfigVM_Task, 'duration_secs': 0.417132} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.706438] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1352.707198] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0167e0ff-95d8-40be-a688-b922fbe16a7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.715255] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1352.715255] env[69927]: value = "task-4097009" [ 1352.715255] env[69927]: _type = "Task" [ 1352.715255] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.728529] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097009, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.069667] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209287} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.069921] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.070118] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1353.070308] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1353.226033] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097009, 'name': Rename_Task, 'duration_secs': 0.196483} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.226366] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1353.226616] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-951cfcfa-6ba2-47b7-98f4-52fa00352d52 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.234332] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1353.234332] env[69927]: value = "task-4097010" [ 1353.234332] env[69927]: _type = "Task" [ 1353.234332] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.243244] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097010, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.555820] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52517b49-cccc-3fbd-12cb-337c0287ad8a/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1353.556753] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7338a3-8779-4ebe-8e1a-4ea2375543e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.563465] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52517b49-cccc-3fbd-12cb-337c0287ad8a/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1353.563633] env[69927]: ERROR oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52517b49-cccc-3fbd-12cb-337c0287ad8a/disk-0.vmdk due to incomplete transfer. [ 1353.563878] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a8396f2e-3f82-49f9-982c-cd51f25bbf9c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.572455] env[69927]: DEBUG oslo_vmware.rw_handles [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52517b49-cccc-3fbd-12cb-337c0287ad8a/disk-0.vmdk. {{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1353.572670] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Uploaded image 10dd49fa-1d39-4594-8a0f-2e3fe886eb62 to the Glance image server {{(pid=69927) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1353.574914] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Destroying the VM {{(pid=69927) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1353.578322] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ffc70ddf-cbd2-4c7b-8d07-1b36b8ef3ab4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.585827] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1353.585827] env[69927]: value = "task-4097011" [ 1353.585827] env[69927]: _type = "Task" [ 1353.585827] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.594317] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097011, 'name': Destroy_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.744890] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097010, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.096317] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097011, 'name': Destroy_Task} progress is 33%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.103902] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1354.104156] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1354.104318] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1354.104501] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1354.104647] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1354.104796] env[69927]: DEBUG nova.virt.hardware [None 
req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1354.105015] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1354.105185] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1354.105355] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1354.105517] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1354.105693] env[69927]: DEBUG nova.virt.hardware [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1354.106486] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19279d9d-c046-4dba-8ac3-2ff10157cf0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.113471] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502d6518-754e-4e7a-abed-9462259c5116 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.127042] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:03:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57691231-2b8d-4d71-8f79-d4a6a1d95ec8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '212263f4-2ce1-4c5f-a203-887ea0b690a0', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1354.134330] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm 
to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1354.134587] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1354.134798] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8f92dc8-e4d8-4632-bec2-49e2f7066093 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.154832] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1354.154832] env[69927]: value = "task-4097012" [ 1354.154832] env[69927]: _type = "Task" [ 1354.154832] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.164436] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097012, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.245526] env[69927]: DEBUG oslo_vmware.api [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097010, 'name': PowerOnVM_Task, 'duration_secs': 0.79366} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.245783] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1354.245984] env[69927]: INFO nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Took 7.67 seconds to spawn the instance on the hypervisor. [ 1354.246183] env[69927]: DEBUG nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1354.246959] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8235a2ba-cc82-4d37-9b7a-4b97c53466ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.598364] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097011, 'name': Destroy_Task, 'duration_secs': 0.952173} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.598616] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Destroyed the VM [ 1354.598859] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Deleting Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1354.599119] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-04a4f6f8-029d-4af8-89a4-d1a622f80b1a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.605552] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1354.605552] env[69927]: value = "task-4097013" [ 1354.605552] env[69927]: _type = "Task" [ 1354.605552] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.613578] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097013, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.664765] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097012, 'name': CreateVM_Task, 'duration_secs': 0.407734} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.665058] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1354.665677] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.665850] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.666193] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1354.666450] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96ea4afc-63af-4bf3-8790-32ba07e1aa3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.671292] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1354.671292] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a2f077-4504-7a6a-bf6a-29c11e620bb6" [ 1354.671292] env[69927]: _type = "Task" [ 1354.671292] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.679349] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a2f077-4504-7a6a-bf6a-29c11e620bb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.763140] env[69927]: INFO nova.compute.manager [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Took 12.39 seconds to build instance. [ 1355.116257] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097013, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.182937] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a2f077-4504-7a6a-bf6a-29c11e620bb6, 'name': SearchDatastore_Task, 'duration_secs': 0.02629} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.183840] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1355.184092] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1355.184343] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.184495] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.184679] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1355.185188] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b511bfb9-b119-4079-809d-c132759b4e83 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.195040] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1355.195040] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1355.195555] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-482f0590-f09b-4cb9-8199-44e8020299f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.201082] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1355.201082] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5265892a-91b3-e30b-3936-346f1064baf1" [ 1355.201082] env[69927]: _type = "Task" [ 1355.201082] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.209728] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5265892a-91b3-e30b-3936-346f1064baf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.265579] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45bfd641-365f-4eac-900b-cf266984ba00 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.897s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.617088] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097013, 'name': RemoveSnapshot_Task, 'duration_secs': 0.5442} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.617398] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Deleted Snapshot of the VM instance {{(pid=69927) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1355.617747] env[69927]: DEBUG nova.compute.manager [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1355.618597] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f02621b-329e-4c0b-9f1c-2444a7ea8573 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.672931] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.673214] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.673402] env[69927]: DEBUG nova.compute.manager [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1355.674258] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6fe89d-c84b-4523-b45c-62353dbeb49d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.681991] env[69927]: DEBUG nova.compute.manager [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1355.682557] env[69927]: DEBUG nova.objects.instance [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'flavor' on Instance uuid 95c47fc8-fed0-4b55-8f65-61b46861e51d {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1355.711267] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 
tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5265892a-91b3-e30b-3936-346f1064baf1, 'name': SearchDatastore_Task, 'duration_secs': 0.010026} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.712073] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-519483a3-14c7-4a05-b698-a5ed1a0ec283 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.717420] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1355.717420] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521bb00e-df5e-112f-182f-ce60b0268e34" [ 1355.717420] env[69927]: _type = "Task" [ 1355.717420] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.724996] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521bb00e-df5e-112f-182f-ce60b0268e34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.131264] env[69927]: INFO nova.compute.manager [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Shelve offloading [ 1356.230576] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521bb00e-df5e-112f-182f-ce60b0268e34, 'name': SearchDatastore_Task, 'duration_secs': 0.00939} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.230817] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.231107] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1356.231395] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09f2c5b8-6770-48d4-9c5c-6268c892eaf2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.238442] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1356.238442] env[69927]: value = "task-4097014" [ 1356.238442] env[69927]: _type = "Task" [ 1356.238442] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.246819] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.634824] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1356.635247] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e16d2a4d-b897-454d-b3b2-41de426ee2b9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.643390] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1356.643390] env[69927]: value = "task-4097015" [ 1356.643390] env[69927]: _type = "Task" [ 1356.643390] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.655566] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1356.655823] env[69927]: DEBUG nova.compute.manager [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1356.656669] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8151811-c9b5-42ec-84a5-5376d73c39f6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.665180] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.665367] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.665547] env[69927]: DEBUG nova.network.neutron [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1356.688602] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1356.689327] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4823189-e59a-4796-9419-d80e066d1fff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.696822] env[69927]: DEBUG oslo_vmware.api [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1356.696822] env[69927]: value = "task-4097016" [ 1356.696822] env[69927]: _type = "Task" [ 1356.696822] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.705625] env[69927]: DEBUG oslo_vmware.api [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.749833] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471269} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.750126] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1356.750352] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1356.750663] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b4f363f-23c1-44e0-8c95-38c6fe55ce1b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.757904] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1356.757904] env[69927]: value = "task-4097017" [ 1356.757904] env[69927]: _type = "Task" [ 1356.757904] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.767427] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097017, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.208894] env[69927]: DEBUG oslo_vmware.api [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097016, 'name': PowerOffVM_Task, 'duration_secs': 0.190938} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.209284] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1357.209372] env[69927]: DEBUG nova.compute.manager [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1357.210145] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0e3f94-be5b-4b68-b445-91d4bb6b7f51 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.267680] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097017, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086188} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.267903] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1357.268699] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfce5e14-7e77-4d96-9993-fdfee0835efd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.292780] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1357.295280] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6770b648-9584-475d-aa43-0c970e0d19a2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.316052] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1357.316052] env[69927]: value = "task-4097018" [ 1357.316052] env[69927]: _type = "Task" [ 1357.316052] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.324974] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097018, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.403755] env[69927]: DEBUG nova.network.neutron [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.721923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9b8b7001-a875-4d0c-80c1-92ff408d1812 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.826487] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097018, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.906580] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1358.133646] env[69927]: INFO nova.compute.manager [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Rebuilding instance [ 1358.137862] env[69927]: DEBUG nova.compute.manager [req-abcace80-f037-4ca9-9623-a3fe77b9773e req-a893d9c9-1ee3-457c-8280-247b6f1d53df service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-vif-unplugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1358.138047] env[69927]: DEBUG oslo_concurrency.lockutils [req-abcace80-f037-4ca9-9623-a3fe77b9773e req-a893d9c9-1ee3-457c-8280-247b6f1d53df service nova] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.138261] env[69927]: DEBUG oslo_concurrency.lockutils [req-abcace80-f037-4ca9-9623-a3fe77b9773e req-a893d9c9-1ee3-457c-8280-247b6f1d53df service nova] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.138438] env[69927]: DEBUG oslo_concurrency.lockutils [req-abcace80-f037-4ca9-9623-a3fe77b9773e req-a893d9c9-1ee3-457c-8280-247b6f1d53df service nova] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.138607] env[69927]: DEBUG nova.compute.manager [req-abcace80-f037-4ca9-9623-a3fe77b9773e req-a893d9c9-1ee3-457c-8280-247b6f1d53df service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] No waiting events found dispatching network-vif-unplugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1358.138773] env[69927]: WARNING nova.compute.manager [req-abcace80-f037-4ca9-9623-a3fe77b9773e req-a893d9c9-1ee3-457c-8280-247b6f1d53df service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received unexpected event network-vif-unplugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1358.182030] env[69927]: DEBUG nova.compute.manager [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1358.182906] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4bd84f-9002-4ae5-9b00-b52c33055b02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.236930] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1358.238089] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e05a7a5-8670-4899-b103-6f821496e912 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.245489] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1358.245725] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d04c7dc4-d070-484c-9869-bf6040075f41 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.325415] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1358.325646] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1358.325812] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleting the datastore file [datastore1] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1358.328924] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcfcc1b5-b10a-4f0c-b7be-b83179b7ec11 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.330769] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097018, 'name': ReconfigVM_Task, 'duration_secs': 0.708821} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.331043] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30/9e9e93cc-e225-4ec7-850f-916aa078ba30.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.332014] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d67c6afe-8173-49ec-b92a-f0b72f31b74f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.335879] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1358.335879] env[69927]: value = "task-4097020" [ 1358.335879] env[69927]: _type = "Task" [ 1358.335879] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.340431] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1358.340431] env[69927]: value = "task-4097021" [ 1358.340431] env[69927]: _type = "Task" [ 1358.340431] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.346928] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097020, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.352163] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097021, 'name': Rename_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.847547] env[69927]: DEBUG oslo_vmware.api [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097020, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131406} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.848237] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1358.848476] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1358.848686] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1358.853815] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097021, 'name': Rename_Task, 'duration_secs': 0.147846} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.854074] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1358.854327] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdc35c5d-ad70-4804-82f1-269467bda229 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.860606] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1358.860606] env[69927]: value = "task-4097022" [ 1358.860606] env[69927]: _type = "Task" [ 1358.860606] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.869049] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097022, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.873989] env[69927]: INFO nova.scheduler.client.report [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted allocations for instance 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 [ 1359.197700] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.198165] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0aeae284-8fa9-4a55-a2e9-530291a81fa8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.206836] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1359.206836] env[69927]: value = "task-4097023" [ 1359.206836] env[69927]: _type = "Task" [ 1359.206836] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.215446] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.372666] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097022, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.377940] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.378307] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.378592] env[69927]: DEBUG nova.objects.instance [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'resources' on Instance uuid 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1359.717170] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1359.717425] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.718205] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea3bf0e-dec6-4670-a2dc-d7b75c98dc28 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.724815] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.725044] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d623384-9402-4f0a-9a89-3b6f2c313d70 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.792394] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.792723] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Deleting contents of the VM from 
datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.792993] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleting the datastore file [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.793352] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ce3e078-a619-4567-9abb-52a0144fd4c6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.799453] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1359.799453] env[69927]: value = "task-4097025" [ 1359.799453] env[69927]: _type = "Task" [ 1359.799453] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.807520] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.872350] env[69927]: DEBUG oslo_vmware.api [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097022, 'name': PowerOnVM_Task, 'duration_secs': 0.659036} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.872698] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1359.872966] env[69927]: DEBUG nova.compute.manager [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1359.873903] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8b91f5-b774-472f-b3ca-fce30a1d0f26 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.880622] env[69927]: DEBUG nova.objects.instance [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'numa_topology' on Instance uuid 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.163046] env[69927]: DEBUG nova.compute.manager [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1360.163278] env[69927]: DEBUG nova.compute.manager [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing instance network info cache due to event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1360.163508] env[69927]: DEBUG oslo_concurrency.lockutils [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] Acquiring lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.163688] env[69927]: DEBUG oslo_concurrency.lockutils [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] Acquired lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1360.163857] env[69927]: DEBUG nova.network.neutron [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1360.308912] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128416} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.309182] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1360.309369] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1360.309542] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1360.384888] env[69927]: DEBUG nova.objects.base [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Object Instance<6bfafa83-a9e2-4f7d-bbad-6b356f173b68> lazy-loaded attributes: resources,numa_topology {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1360.394682] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.462663] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea548e5-02a3-4c15-91ca-def11fdde060 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.470657] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2d08f6-3faa-4031-8ea0-61dfb6d9594f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.500657] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7000bf-c28a-4bf8-8355-1151bc6ed5da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.508308] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0898498-32ac-4491-85dc-dc51f4b99be9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.521486] env[69927]: DEBUG nova.compute.provider_tree [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1360.531304] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 
tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.882891] env[69927]: DEBUG nova.network.neutron [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updated VIF entry in instance network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1360.883274] env[69927]: DEBUG nova.network.neutron [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": null, "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.024404] env[69927]: DEBUG nova.scheduler.client.report [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1361.346398] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1361.346669] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1361.346829] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1361.347013] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1361.347166] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1361.347312] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1361.347526] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1361.347690] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1361.347872] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1361.348047] env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1361.348228] 
env[69927]: DEBUG nova.virt.hardware [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1361.349131] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ce5298-f0c2-4694-82ab-7996279b4112 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.358048] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a8bab8-f569-4feb-b863-a31ecc9c4718 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.373216] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:5d:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d10cd3a-98db-44a5-a193-057f208e8f72', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1361.380407] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1361.380646] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1361.380861] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47214614-f23b-4460-9fe5-559fb789d055 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.394772] env[69927]: DEBUG oslo_concurrency.lockutils [req-bbf1804d-ca43-4611-a94c-ec967414ad26 req-b0fd30c4-ab11-4e38-bd74-a75c9e4d9ff1 service nova] Releasing lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1361.400969] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1361.400969] env[69927]: value = "task-4097026" [ 1361.400969] env[69927]: _type = "Task" [ 1361.400969] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.409213] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097026, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.529625] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.151s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1361.532591] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.138s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.532841] env[69927]: DEBUG nova.objects.instance [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1361.911659] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097026, 'name': CreateVM_Task, 'duration_secs': 0.34676} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.911874] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1361.912492] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.912663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1361.913051] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1361.913317] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5acc0771-8827-4e32-8712-98da8cbaca46 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.918314] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting 
for the task: (returnval){ [ 1361.918314] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52efa341-0ad5-d07d-e6b2-25cac06a45c8" [ 1361.918314] env[69927]: _type = "Task" [ 1361.918314] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.926461] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52efa341-0ad5-d07d-e6b2-25cac06a45c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.042984] env[69927]: DEBUG oslo_concurrency.lockutils [None req-498c1905-e28c-45aa-9d8f-4af647c169d5 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.295s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.043817] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.513s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.044331] env[69927]: INFO nova.compute.manager [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Unshelving [ 1362.430724] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52efa341-0ad5-d07d-e6b2-25cac06a45c8, 'name': SearchDatastore_Task, 'duration_secs': 0.015866} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.431141] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1362.431278] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1362.431515] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.431665] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1362.431843] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1362.432130] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cff31c05-78f0-475e-9795-3a36b32d5fbb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.441670] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1362.441890] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1362.442628] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6548b4e9-f8ed-4429-878f-4028b51e98d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.448672] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1362.448672] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522d3d51-7b41-255e-c902-834c7e95abb4" [ 1362.448672] env[69927]: _type = "Task" [ 1362.448672] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.457268] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522d3d51-7b41-255e-c902-834c7e95abb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.544852] env[69927]: DEBUG oslo_concurrency.lockutils [None req-3158f0d1-3e5c-4401-ad1e-1f0f13e99bab tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.959420] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522d3d51-7b41-255e-c902-834c7e95abb4, 'name': SearchDatastore_Task, 'duration_secs': 0.009961} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.960202] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eba534be-b259-4927-9583-4450a7c14251 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.965520] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1362.965520] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f1841d-f822-57d2-b269-480f1b0e2f36" [ 1362.965520] env[69927]: _type = "Task" [ 1362.965520] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.973628] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f1841d-f822-57d2-b269-480f1b0e2f36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.071767] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.072078] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.072300] env[69927]: DEBUG nova.objects.instance [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'pci_requests' on Instance uuid 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.477713] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f1841d-f822-57d2-b269-480f1b0e2f36, 'name': SearchDatastore_Task, 'duration_secs': 0.009738} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.478351] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1363.478351] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1363.478545] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b54332d5-8754-4ae7-b238-f26d1a92521a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.486179] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1363.486179] env[69927]: value = "task-4097027" [ 1363.486179] env[69927]: _type = "Task" [ 1363.486179] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.493872] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.576251] env[69927]: DEBUG nova.objects.instance [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'numa_topology' on Instance uuid 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.999803] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456115} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.000098] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1364.000331] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1364.000650] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a21664c7-4e30-4192-a92b-ff29ab5cab25 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.011448] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1364.011448] env[69927]: value = "task-4097028" [ 1364.011448] env[69927]: _type = "Task" [ 1364.011448] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.020831] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.078718] env[69927]: INFO nova.compute.claims [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1364.521348] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072476} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.521845] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1364.522444] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6b9e36-f047-440b-8008-577f054abd1a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.544641] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1364.544868] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8a90b41-65b2-4a10-bee8-47092379ebff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.563818] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1364.563818] env[69927]: value = "task-4097029" [ 1364.563818] env[69927]: _type = "Task" [ 1364.563818] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.573441] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.074516] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097029, 'name': ReconfigVM_Task, 'duration_secs': 0.283488} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.074848] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d/95c47fc8-fed0-4b55-8f65-61b46861e51d.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1365.075484] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-464f57b4-c742-400d-ac5e-be79a88e3ee6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.081981] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1365.081981] env[69927]: value = "task-4097030" [ 1365.081981] env[69927]: _type = "Task" [ 1365.081981] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.092331] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097030, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.163466] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc7fbad-316c-40ea-b87f-4f624255c299 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.171355] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe7404a-8ceb-41b1-9938-390041c029dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.204443] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d88eba-3ad8-4523-8ed4-8d1eb8e5e1da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.212968] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f881195-dbc4-4973-98ea-156035db5126 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.227270] env[69927]: DEBUG nova.compute.provider_tree [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.591900] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097030, 'name': Rename_Task, 'duration_secs': 0.148644} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.592314] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1365.592446] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-438e2712-cb6f-44e0-97ff-500306080bac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.598829] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1365.598829] env[69927]: value = "task-4097031" [ 1365.598829] env[69927]: _type = "Task" [ 1365.598829] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.606578] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097031, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.731086] env[69927]: DEBUG nova.scheduler.client.report [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1366.108931] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097031, 'name': PowerOnVM_Task, 'duration_secs': 0.478996} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.109249] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1366.109458] env[69927]: DEBUG nova.compute.manager [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1366.110261] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80f07f8-5033-48ba-9bf1-88dfb85f6d12 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.236636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.164s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.265903] env[69927]: INFO nova.network.neutron [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating port cdd401dc-1a64-40f2-931e-f19611bb56d3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1366.621667] env[69927]: INFO nova.compute.manager [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] bringing vm to original state: 'stopped' [ 1367.631230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1367.631230] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.631230] env[69927]: DEBUG nova.compute.manager [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1367.631230] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a936d5d3-9cbf-4041-8765-83e6af5635fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.640370] env[69927]: DEBUG nova.compute.manager [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1367.661985] env[69927]: DEBUG nova.compute.manager [req-a7e3b822-3591-4591-b3c2-cd372a8bec18 req-3ed4d722-c106-4056-96ed-09969d8a935a service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-vif-plugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1367.662198] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7e3b822-3591-4591-b3c2-cd372a8bec18 req-3ed4d722-c106-4056-96ed-09969d8a935a service nova] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1367.662410] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7e3b822-3591-4591-b3c2-cd372a8bec18 req-3ed4d722-c106-4056-96ed-09969d8a935a service nova] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.662581] env[69927]: DEBUG oslo_concurrency.lockutils [req-a7e3b822-3591-4591-b3c2-cd372a8bec18 req-3ed4d722-c106-4056-96ed-09969d8a935a service nova] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.662754] env[69927]: DEBUG nova.compute.manager [req-a7e3b822-3591-4591-b3c2-cd372a8bec18 req-3ed4d722-c106-4056-96ed-09969d8a935a service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] No waiting events found dispatching network-vif-plugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1367.662914] env[69927]: WARNING nova.compute.manager [req-a7e3b822-3591-4591-b3c2-cd372a8bec18 req-3ed4d722-c106-4056-96ed-09969d8a935a service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received unexpected event network-vif-plugged-cdd401dc-1a64-40f2-931e-f19611bb56d3 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1367.762286] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.762341] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.762532] env[69927]: DEBUG nova.network.neutron [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1368.145940] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1368.146248] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe2dd28b-9c9b-4f4b-af7e-2d07ef1d9126 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.155379] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1368.155379] env[69927]: value = "task-4097032" [ 1368.155379] env[69927]: _type = "Task" [ 1368.155379] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.165802] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097032, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.487716] env[69927]: DEBUG nova.network.neutron [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.668660] env[69927]: DEBUG oslo_vmware.api [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097032, 'name': PowerOffVM_Task, 'duration_secs': 0.246792} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.669066] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1368.669276] env[69927]: DEBUG nova.compute.manager [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1368.670423] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be61524-b26a-4639-bdf4-1bcb2e26a424 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.991542] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1369.019749] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='16adcf1f8a2700fb45c1edb65d34d83e',container_format='bare',created_at=2025-05-13T19:48:14Z,direct_url=,disk_format='vmdk',id=10dd49fa-1d39-4594-8a0f-2e3fe886eb62,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1347937074-shelved',owner='9cf6bb3492c642aa9a168e484299289c',properties=ImageMetaProps,protected=,size=31660544,status='active',tags=,updated_at=2025-05-13T19:48:27Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1369.020026] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.020193] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1369.020382] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.020531] env[69927]: DEBUG nova.virt.hardware [None 
req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1369.020715] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1369.020935] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1369.021115] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1369.021292] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1369.021457] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1369.021642] env[69927]: DEBUG nova.virt.hardware [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1369.022536] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1229845-22a1-4c91-8730-31d889f0c01a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.030731] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182fb711-3885-4b85-b510-651863620cf4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.045230] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:81:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cbd5e0e-9116-46f1-9748-13a73d2d7e75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdd401dc-1a64-40f2-931e-f19611bb56d3', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.052626] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1369.052889] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1369.053113] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2cbad7b-c21e-4a09-84f7-5ae04930ac45 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.073259] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.073259] env[69927]: value = "task-4097033" [ 1369.073259] env[69927]: _type = "Task" [ 1369.073259] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.081699] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097033, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.186328] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.557s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1369.583639] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097033, 'name': CreateVM_Task, 'duration_secs': 0.34931} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.583828] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1369.584523] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.584740] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.585159] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1369.585475] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6fe99e2-9e1f-45ae-a63b-7f2bd9ba5b58 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.589979] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1369.589979] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e2ef87-1072-b9a0-d0d2-07e4a75fb2f3" [ 1369.589979] env[69927]: _type = "Task" [ 1369.589979] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.599013] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52e2ef87-1072-b9a0-d0d2-07e4a75fb2f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.694362] env[69927]: DEBUG nova.compute.manager [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1369.694362] env[69927]: DEBUG nova.compute.manager [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing instance network info cache due to event network-changed-cdd401dc-1a64-40f2-931e-f19611bb56d3. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1369.694362] env[69927]: DEBUG oslo_concurrency.lockutils [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] Acquiring lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.694362] env[69927]: DEBUG oslo_concurrency.lockutils [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] Acquired lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.694729] env[69927]: DEBUG nova.network.neutron [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Refreshing network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.697371] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1369.697678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1369.697860] env[69927]: DEBUG nova.objects.instance [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1370.100785] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.101088] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Processing image 10dd49fa-1d39-4594-8a0f-2e3fe886eb62 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.101330] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62/10dd49fa-1d39-4594-8a0f-2e3fe886eb62.vmdk" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.101478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62/10dd49fa-1d39-4594-8a0f-2e3fe886eb62.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1370.101681] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1370.101970] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-420c7151-9811-445a-9be1-1ce907ba8e34 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.111564] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1370.111805] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1370.112457] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eed20977-edda-4daf-8b8b-d6d7d082a582 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.117861] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1370.117861] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a733dc-d831-2b4f-8d5d-ccdae1b99cd9" [ 1370.117861] env[69927]: _type = "Task" [ 1370.117861] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.126020] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a733dc-d831-2b4f-8d5d-ccdae1b99cd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.400972] env[69927]: DEBUG nova.network.neutron [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updated VIF entry in instance network info cache for port cdd401dc-1a64-40f2-931e-f19611bb56d3. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1370.401353] env[69927]: DEBUG nova.network.neutron [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [{"id": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "address": "fa:16:3e:fc:81:80", "network": {"id": "bd90fedf-a086-42e6-9814-07044a035f6e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-951403655-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cf6bb3492c642aa9a168e484299289c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd401dc-1a", "ovs_interfaceid": "cdd401dc-1a64-40f2-931e-f19611bb56d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.628283] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Preparing fetch location {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1370.628526] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Fetch image to [datastore2] OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59/OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59.vmdk {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1370.628750] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Downloading stream optimized image 10dd49fa-1d39-4594-8a0f-2e3fe886eb62 to [datastore2] OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59/OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59.vmdk on the data store datastore2 as vApp {{(pid=69927) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1370.628931] env[69927]: DEBUG nova.virt.vmwareapi.images [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Downloading image file data 10dd49fa-1d39-4594-8a0f-2e3fe886eb62 to the ESX as VM named 'OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59' {{(pid=69927) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1370.642251] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.642485] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.642703] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "95c47fc8-fed0-4b55-8f65-61b46861e51d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.642884] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.643085] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.646508] env[69927]: INFO nova.compute.manager [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Terminating instance [ 1370.701042] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1370.701042] env[69927]: value = "resgroup-9" [ 1370.701042] env[69927]: _type = "ResourcePool" [ 1370.701042] env[69927]: }. 
{{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1370.701454] env[69927]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b777bce6-d39a-499b-95b3-9dd843e30cb6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.717601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9798b8ea-370a-4c28-9454-3f903af63278 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.724794] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease: (returnval){ [ 1370.724794] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1370.724794] env[69927]: _type = "HttpNfcLease" [ 1370.724794] env[69927]: } obtained for vApp import into resource pool (val){ [ 1370.724794] env[69927]: value = "resgroup-9" [ 1370.724794] env[69927]: _type = "ResourcePool" [ 1370.724794] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1370.725235] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the lease: (returnval){ [ 1370.725235] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1370.725235] env[69927]: _type = "HttpNfcLease" [ 1370.725235] env[69927]: } to be ready. {{(pid=69927) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1370.731779] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1370.731779] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1370.731779] env[69927]: _type = "HttpNfcLease" [ 1370.731779] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1370.903774] env[69927]: DEBUG oslo_concurrency.lockutils [req-365c82dc-83e1-4fdb-84cf-4ec6942b9a2b req-3232cf80-4bb4-44a8-a9dd-ef639ba3f7af service nova] Releasing lock "refresh_cache-6bfafa83-a9e2-4f7d-bbad-6b356f173b68" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1371.151642] env[69927]: DEBUG nova.compute.manager [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1371.151924] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1371.152837] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9093e0-49f6-4763-8fe1-d79c95b61214 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.161377] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1371.161584] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a44a529-9d40-4200-8d2e-69d4244a21db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.232805] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1371.233096] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1371.233279] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleting the datastore file [datastore1] 95c47fc8-fed0-4b55-8f65-61b46861e51d {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1371.233557] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cf006af-e0c6-46a0-a8c5-865ebcde9d89 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.236903] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1371.236903] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1371.236903] env[69927]: _type = "HttpNfcLease" [ 1371.236903] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1371.243377] env[69927]: DEBUG oslo_vmware.api [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1371.243377] env[69927]: value = "task-4097036" [ 1371.243377] env[69927]: _type = "Task" [ 1371.243377] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.253808] env[69927]: DEBUG oslo_vmware.api [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097036, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.733168] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1371.733168] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1371.733168] env[69927]: _type = "HttpNfcLease" [ 1371.733168] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1371.752861] env[69927]: DEBUG oslo_vmware.api [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149585} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.753119] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1371.753313] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1371.753490] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1371.753665] env[69927]: INFO nova.compute.manager [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1371.753907] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1371.754115] env[69927]: DEBUG nova.compute.manager [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1371.754209] env[69927]: DEBUG nova.network.neutron [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1372.005165] env[69927]: DEBUG nova.compute.manager [req-e4f3b5ca-66ba-4bd5-9687-e6ce75a2af9e req-9efef94f-21d6-4e99-9dd4-17690a4de99f service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Received event network-vif-deleted-4d10cd3a-98db-44a5-a193-057f208e8f72 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1372.005393] env[69927]: INFO nova.compute.manager [req-e4f3b5ca-66ba-4bd5-9687-e6ce75a2af9e req-9efef94f-21d6-4e99-9dd4-17690a4de99f service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Neutron deleted interface 4d10cd3a-98db-44a5-a193-057f208e8f72; detaching it from the instance and deleting it from the info cache [ 1372.005681] env[69927]: DEBUG nova.network.neutron [req-e4f3b5ca-66ba-4bd5-9687-e6ce75a2af9e req-9efef94f-21d6-4e99-9dd4-17690a4de99f service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.234642] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1372.234642] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1372.234642] env[69927]: _type = "HttpNfcLease" [ 1372.234642] env[69927]: } is initializing. {{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1372.489123] env[69927]: DEBUG nova.network.neutron [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.508623] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae96fa3b-1d42-479b-b50f-ba5b7b1e5ca1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.519206] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285dee1b-8d41-4def-9b6a-87c478516061 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.549089] env[69927]: DEBUG nova.compute.manager [req-e4f3b5ca-66ba-4bd5-9687-e6ce75a2af9e req-9efef94f-21d6-4e99-9dd4-17690a4de99f service nova] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Detach interface failed, port_id=4d10cd3a-98db-44a5-a193-057f208e8f72, reason: Instance 95c47fc8-fed0-4b55-8f65-61b46861e51d could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1372.736092] env[69927]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1372.736092] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1372.736092] env[69927]: _type = "HttpNfcLease" [ 1372.736092] env[69927]: } is ready. 
{{(pid=69927) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1372.736927] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1372.736927] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528471e0-c0b0-36a0-5d50-e791a4655f6a" [ 1372.736927] env[69927]: _type = "HttpNfcLease" [ 1372.736927] env[69927]: }. {{(pid=69927) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1372.737462] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c76bd3-d0b4-4c60-a4ae-bc819a0fb168 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.746505] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604582-c3a9-fd8d-8281-d4d603a6e999/disk-0.vmdk from lease info. {{(pid=69927) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1372.746906] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating HTTP connection to write to file with size = 31660544 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604582-c3a9-fd8d-8281-d4d603a6e999/disk-0.vmdk. {{(pid=69927) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1372.809676] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ac8cfd30-a643-4e8b-8671-7085e444a891 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.991932] env[69927]: INFO nova.compute.manager [-] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Took 1.24 seconds to deallocate network for instance. 
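The lease lines above trace the stream-optimized image import path: nova asks vCenter for an HttpNfcLease on the target resource pool (ResourcePool.ImportVApp), polls the lease until it reports ready, reads the lease info to locate the disk-0.vmdk upload URL, then streams the image bytes over an HTTP write connection while periodically renewing the lease with HttpNfcLeaseProgress and finally marking it complete. A minimal sketch of that lifecycle follows; every method on `session` is a hypothetical stand-in for the corresponding vSphere/oslo.vmware call visible in the log, not the real API.

import time

def import_image_via_nfc_lease(session, rp_ref, import_spec, image_chunks, size):
    # Hypothetical helpers mirroring the calls logged above.
    lease = session.import_vapp(rp_ref, import_spec)          # ResourcePool.ImportVApp
    while session.get_lease_state(lease) == 'initializing':   # "_poll_lease ... is initializing"
        time.sleep(1)
    if session.get_lease_state(lease) != 'ready':
        raise RuntimeError('HttpNfcLease failed to become ready')

    url = session.find_vmdk_url(lease)            # from the lease info / deviceUrl
    conn = session.open_write_connection(url, size)
    written = 0
    try:
        for chunk in image_chunks:                # glance image iterator
            conn.write(chunk)
            written += len(chunk)
            # Keep the lease alive and report progress, as HttpNfcLeaseProgress does.
            session.lease_progress(lease, percent=written * 100 // max(size, 1))
        session.lease_complete(lease)             # HttpNfcLease.HttpNfcLeaseComplete
    except Exception:
        session.lease_abort(lease)
        raise
    finally:
        conn.close()

The "Lease: ... is initializing" / "is ready" polls, the "Found VMDK URL" and "Creating HTTP connection to write to file with size = 31660544" lines, and the later HttpNfcLeaseProgress/HttpNfcLeaseComplete invocations map onto the same sequence.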
[ 1373.500459] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1373.500795] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1373.501102] env[69927]: DEBUG nova.objects.instance [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'resources' on Instance uuid 95c47fc8-fed0-4b55-8f65-61b46861e51d {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1373.935926] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Completed reading data from the image iterator. {{(pid=69927) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1373.936350] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604582-c3a9-fd8d-8281-d4d603a6e999/disk-0.vmdk. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1373.937702] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9d60bb-307f-4abb-923b-fedea12fad86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.944935] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604582-c3a9-fd8d-8281-d4d603a6e999/disk-0.vmdk is in state: ready. {{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1373.945231] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604582-c3a9-fd8d-8281-d4d603a6e999/disk-0.vmdk. 
{{(pid=69927) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1373.945622] env[69927]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a7e797ec-5d2e-4247-9665-fd95ba991cd4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.083186] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45310854-750b-455f-a474-b8df25a0f7f3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.090738] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf4710f-4dc9-4126-8b26-e822d29f2a97 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.120948] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db087465-cf99-4dc4-9d52-dd457f875947 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.129103] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932adb65-5a07-4af8-be08-aa603ece58e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.142849] env[69927]: DEBUG nova.compute.provider_tree [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.154887] env[69927]: DEBUG oslo_vmware.rw_handles [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604582-c3a9-fd8d-8281-d4d603a6e999/disk-0.vmdk. 
{{(pid=69927) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1374.155164] env[69927]: INFO nova.virt.vmwareapi.images [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Downloaded image file data 10dd49fa-1d39-4594-8a0f-2e3fe886eb62 [ 1374.156235] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efc5f3f-bfae-4c4b-ba12-d6f33c0552fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.171923] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0d3e0d5-28a0-4cf4-af12-979ca473f28a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.208882] env[69927]: INFO nova.virt.vmwareapi.images [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] The imported VM was unregistered [ 1374.211241] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Caching image {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1374.211495] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Creating directory with path [datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1374.211787] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89c40239-ab37-40c4-bd7c-ec98bad6b606 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.224530] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Created directory with path [datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62 {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1374.224714] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59/OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59.vmdk to [datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62/10dd49fa-1d39-4594-8a0f-2e3fe886eb62.vmdk. 
{{(pid=69927) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1374.224969] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bbed6b41-7653-4f37-b87c-796d6ed6f51a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.232393] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1374.232393] env[69927]: value = "task-4097038" [ 1374.232393] env[69927]: _type = "Task" [ 1374.232393] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.240052] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097038, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.647029] env[69927]: DEBUG nova.scheduler.client.report [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1374.744421] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097038, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.152036] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.651s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1375.175544] env[69927]: INFO nova.scheduler.client.report [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted allocations for instance 95c47fc8-fed0-4b55-8f65-61b46861e51d [ 1375.244410] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097038, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.684293] env[69927]: DEBUG oslo_concurrency.lockutils [None req-9eadf668-ec58-43be-81ed-96c11c1e9925 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "95c47fc8-fed0-4b55-8f65-61b46861e51d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.042s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1375.744503] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097038, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.246482] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097038, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.744638] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097038, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.342921} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.744907] env[69927]: INFO nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59/OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59.vmdk to [datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62/10dd49fa-1d39-4594-8a0f-2e3fe886eb62.vmdk. 
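Between the SearchDatastore_Task, MakeDirectory, MoveVirtualDisk_Task and later CopyVirtualDisk_Task calls, the driver is populating the per-datastore image cache: the image is downloaded once into a temporary OSTACK_IMG_* folder, moved into devstack-image-cache_base/<image-id>/<image-id>.vmdk under a lock keyed on that cache path, and each instance then receives its own copy of the cached disk. A rough sketch of that fetch-if-missing pattern, assuming a hypothetical `ds` helper that wraps the datastore tasks and using oslo.concurrency's lock context manager for the per-image lock:

from oslo_concurrency import lockutils

def fetch_image_if_missing(ds, image_id, tmp_folder, instance_folder):
    # `ds` is a hypothetical wrapper around SearchDatastore_Task,
    # MakeDirectory, MoveVirtualDisk_Task, CopyVirtualDisk_Task and
    # DeleteDatastoreFile_Task; paths are simplified.
    cache_vmdk = 'devstack-image-cache_base/%s/%s.vmdk' % (image_id, image_id)

    # One lock per cached VMDK so concurrent spawns of the same image
    # do not download it twice (the Acquiring/Releasing lock lines above).
    with lockutils.lock(cache_vmdk, external=True):
        if not ds.file_exists(cache_vmdk):                      # SearchDatastore_Task
            ds.make_directory('devstack-image-cache_base/%s' % image_id)
            ds.download_stream_optimized(image_id, tmp_folder)  # vApp import above
            ds.move_virtual_disk(                               # MoveVirtualDisk_Task
                '%s/%s.vmdk' % (tmp_folder, tmp_folder), cache_vmdk)
            ds.delete_file(tmp_folder)                          # DeleteDatastoreFile_Task

    # Outside the cache lock: give this instance its private copy of the disk.
    ds.copy_virtual_disk(cache_vmdk,                            # CopyVirtualDisk_Task
                         '%s/%s.vmdk' % (instance_folder, instance_folder))

In the trace this ordering is visible as: the temporary OSTACK_IMG_4925664a-... disk is moved into the cache, the leftover folder is deleted, the lock on the cached .vmdk is released, and only then does CopyVirtualDisk_Task produce 6bfafa83-.../6bfafa83-....vmdk for the instance.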
[ 1376.745118] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Cleaning up location [datastore2] OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1376.745294] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_4925664a-3b0d-41c5-93fd-f58b54e08c59 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1376.745544] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-483a7141-2226-4872-b4bc-23705c62729b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.751702] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1376.751702] env[69927]: value = "task-4097039" [ 1376.751702] env[69927]: _type = "Task" [ 1376.751702] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.760087] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097039, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.263086] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.054128} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.263086] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1377.263469] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62/10dd49fa-1d39-4594-8a0f-2e3fe886eb62.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1377.263469] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62/10dd49fa-1d39-4594-8a0f-2e3fe886eb62.vmdk to [datastore2] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1377.263710] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c62be349-302c-4c94-8093-7cb451457261 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.270992] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1377.270992] env[69927]: value = "task-4097040" [ 1377.270992] env[69927]: _type = "Task" [ 1377.270992] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.278957] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097040, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.569101] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "7a1f95d9-892e-492d-acbe-d70b56c36698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.569377] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.782036] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097040, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.072141] env[69927]: DEBUG nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1378.282960] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097040, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.597087] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1378.597464] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.599307] env[69927]: INFO nova.compute.claims [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1378.783216] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097040, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.284227] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097040, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.507786] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.675157] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1674dbc8-accd-49ab-88f1-b609e5d27a51 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.683315] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00aee32-fa6f-4b4b-97fd-1d81f008fb59 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.715046] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774333eb-0202-485d-bfbc-38161c920880 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.721637] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7e6ea0-b5bd-4023-a23f-bdb2b86ac055 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.734819] env[69927]: DEBUG nova.compute.provider_tree [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.784683] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097040, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.275746} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.784935] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/10dd49fa-1d39-4594-8a0f-2e3fe886eb62/10dd49fa-1d39-4594-8a0f-2e3fe886eb62.vmdk to [datastore2] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1379.785761] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fc0131-972a-425c-b269-e8ccf40cc30b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.808907] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1379.809173] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0974ac9e-70ac-44c9-b6ae-6f80304528e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.829338] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1379.829338] env[69927]: value = "task-4097041" [ 1379.829338] env[69927]: _type = "Task" [ 1379.829338] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.837468] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097041, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.238071] env[69927]: DEBUG nova.scheduler.client.report [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1380.341755] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097041, 'name': ReconfigVM_Task, 'duration_secs': 0.2842} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.342481] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68/6bfafa83-a9e2-4f7d-bbad-6b356f173b68.vmdk or device None with type streamOptimized {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1380.343064] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1da8e88b-488b-46f6-bf40-62486b2ab754 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.350758] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1380.350758] env[69927]: value = "task-4097042" [ 1380.350758] env[69927]: _type = "Task" [ 1380.350758] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.363013] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097042, 'name': Rename_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.742834] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.145s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.743373] env[69927]: DEBUG nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1380.861418] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097042, 'name': Rename_Task, 'duration_secs': 0.163028} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.861680] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1380.861902] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ba30959-fb1d-4c04-a392-01d612d1dd68 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.868722] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1380.868722] env[69927]: value = "task-4097043" [ 1380.868722] env[69927]: _type = "Task" [ 1380.868722] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.877075] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097043, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.248839] env[69927]: DEBUG nova.compute.utils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1381.250021] env[69927]: DEBUG nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1381.250162] env[69927]: DEBUG nova.network.neutron [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1381.289608] env[69927]: DEBUG nova.policy [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef08164611894c289d4c30194d91526a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823381b9f644adf818b490c551f5a3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1381.378681] env[69927]: DEBUG oslo_vmware.api [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097043, 'name': PowerOnVM_Task, 'duration_secs': 0.467314} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.379017] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1381.496519] env[69927]: DEBUG nova.compute.manager [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1381.498032] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67914571-cf6e-48fb-ac6e-199815448638 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.617034] env[69927]: DEBUG nova.network.neutron [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Successfully created port: 6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1381.753870] env[69927]: DEBUG nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1382.015719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-40d44a1f-0b6b-44e7-9c32-982c11aafccc tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.971s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.412109] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.412477] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.412590] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.412774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.412950] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.415403] env[69927]: INFO nova.compute.manager [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Terminating instance [ 1382.764349] env[69927]: DEBUG nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1382.791277] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1382.791530] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.791687] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1382.791922] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.792101] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1382.792342] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1382.792493] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1382.792682] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1382.792946] env[69927]: DEBUG 
nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1382.793060] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1382.793241] env[69927]: DEBUG nova.virt.hardware [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1382.794120] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c825c5c-30d1-4257-a892-f89455117189 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.802212] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5526b2e3-b81c-40eb-9848-a45559e3c052 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.919969] env[69927]: DEBUG nova.compute.manager [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1382.920238] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1382.921206] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fabc49-6565-4357-935a-7ef827fca633 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.928771] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1382.929021] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65e6dfec-8f26-4518-9c99-ccb58c19d10d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.935990] env[69927]: DEBUG oslo_vmware.api [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1382.935990] env[69927]: value = "task-4097044" [ 1382.935990] env[69927]: _type = "Task" [ 1382.935990] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.946334] env[69927]: DEBUG oslo_vmware.api [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097044, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.992141] env[69927]: DEBUG nova.compute.manager [req-d47fc38b-e04b-4373-9076-0aee7639937f req-5023d559-b564-423b-b37a-17bc77c463d0 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Received event network-vif-plugged-6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1382.992492] env[69927]: DEBUG oslo_concurrency.lockutils [req-d47fc38b-e04b-4373-9076-0aee7639937f req-5023d559-b564-423b-b37a-17bc77c463d0 service nova] Acquiring lock "7a1f95d9-892e-492d-acbe-d70b56c36698-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.992724] env[69927]: DEBUG oslo_concurrency.lockutils [req-d47fc38b-e04b-4373-9076-0aee7639937f req-5023d559-b564-423b-b37a-17bc77c463d0 service nova] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.992894] env[69927]: DEBUG oslo_concurrency.lockutils [req-d47fc38b-e04b-4373-9076-0aee7639937f req-5023d559-b564-423b-b37a-17bc77c463d0 service nova] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.993101] env[69927]: DEBUG nova.compute.manager [req-d47fc38b-e04b-4373-9076-0aee7639937f req-5023d559-b564-423b-b37a-17bc77c463d0 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] No waiting events found dispatching network-vif-plugged-6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1382.993328] env[69927]: WARNING nova.compute.manager [req-d47fc38b-e04b-4373-9076-0aee7639937f req-5023d559-b564-423b-b37a-17bc77c463d0 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Received unexpected event network-vif-plugged-6b4007d2-3eea-417e-b36d-28ced978b73f for instance with vm_state building and task_state spawning. [ 1383.084863] env[69927]: DEBUG nova.network.neutron [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Successfully updated port: 6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1383.446286] env[69927]: DEBUG oslo_vmware.api [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097044, 'name': PowerOffVM_Task, 'duration_secs': 0.207693} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.446632] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1383.446729] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1383.446983] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b65d2976-1fa5-4930-8279-5631116c4f38 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.516200] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1383.516422] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1383.516656] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleting the datastore file [datastore2] 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1383.517083] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a42a956-0109-447f-987e-aa374d48aadb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.523560] env[69927]: DEBUG oslo_vmware.api [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for the task: (returnval){ [ 1383.523560] env[69927]: value = "task-4097046" [ 1383.523560] env[69927]: _type = "Task" [ 1383.523560] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.531441] env[69927]: DEBUG oslo_vmware.api [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097046, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.588478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.588744] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1383.588744] env[69927]: DEBUG nova.network.neutron [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1384.032811] env[69927]: DEBUG oslo_vmware.api [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Task: {'id': task-4097046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147493} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.033114] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1384.033270] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1384.033461] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1384.033643] env[69927]: INFO nova.compute.manager [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1384.033887] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1384.034090] env[69927]: DEBUG nova.compute.manager [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1384.034186] env[69927]: DEBUG nova.network.neutron [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1384.121194] env[69927]: DEBUG nova.network.neutron [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1384.287433] env[69927]: DEBUG nova.network.neutron [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updating instance_info_cache with network_info: [{"id": "6b4007d2-3eea-417e-b36d-28ced978b73f", "address": "fa:16:3e:e1:c9:1d", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b4007d2-3e", "ovs_interfaceid": "6b4007d2-3eea-417e-b36d-28ced978b73f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.790641] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1384.791119] env[69927]: DEBUG nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Instance network_info: |[{"id": "6b4007d2-3eea-417e-b36d-28ced978b73f", "address": "fa:16:3e:e1:c9:1d", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b4007d2-3e", "ovs_interfaceid": "6b4007d2-3eea-417e-b36d-28ced978b73f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1384.791448] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:c9:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b4007d2-3eea-417e-b36d-28ced978b73f', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1384.799038] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1384.799263] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1384.799486] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed93c7b2-d7a7-4633-a126-13826388a478 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.821966] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1384.821966] env[69927]: value = "task-4097047" [ 1384.821966] env[69927]: _type = "Task" [ 1384.821966] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.831210] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097047, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.964122] env[69927]: DEBUG nova.network.neutron [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.029332] env[69927]: DEBUG nova.compute.manager [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Received event network-changed-6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1385.029687] env[69927]: DEBUG nova.compute.manager [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Refreshing instance network info cache due to event network-changed-6b4007d2-3eea-417e-b36d-28ced978b73f. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1385.029960] env[69927]: DEBUG oslo_concurrency.lockutils [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] Acquiring lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.030282] env[69927]: DEBUG oslo_concurrency.lockutils [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] Acquired lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1385.030516] env[69927]: DEBUG nova.network.neutron [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Refreshing network info cache for port 6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1385.332107] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097047, 'name': CreateVM_Task, 'duration_secs': 0.32327} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.332342] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1385.333119] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.333311] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1385.333663] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1385.333949] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5d10b30-9d0b-4e3c-b9a7-bfaa66ada33a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.338412] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1385.338412] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5235cdbd-ef69-52db-4af2-1148c915efca" [ 1385.338412] env[69927]: _type = "Task" [ 1385.338412] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.346412] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5235cdbd-ef69-52db-4af2-1148c915efca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.467290] env[69927]: INFO nova.compute.manager [-] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Took 1.43 seconds to deallocate network for instance. [ 1385.502200] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.716402] env[69927]: DEBUG nova.network.neutron [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updated VIF entry in instance network info cache for port 6b4007d2-3eea-417e-b36d-28ced978b73f. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1385.716796] env[69927]: DEBUG nova.network.neutron [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updating instance_info_cache with network_info: [{"id": "6b4007d2-3eea-417e-b36d-28ced978b73f", "address": "fa:16:3e:e1:c9:1d", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b4007d2-3e", "ovs_interfaceid": "6b4007d2-3eea-417e-b36d-28ced978b73f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.849080] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5235cdbd-ef69-52db-4af2-1148c915efca, 'name': SearchDatastore_Task, 'duration_secs': 0.009562} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.849469] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1385.849581] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1385.849820] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.849970] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1385.850171] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1385.850747] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55b46ecb-11f0-4504-9f3b-81763351f341 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.858712] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1385.858891] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1385.859601] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35e8d91a-fabf-41c0-8134-3cff23d9a2aa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.865500] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1385.865500] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bf7b6b-008f-c444-bd60-3ded1fd54983" [ 1385.865500] env[69927]: _type = "Task" [ 1385.865500] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.873037] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bf7b6b-008f-c444-bd60-3ded1fd54983, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.974462] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.974767] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.975061] env[69927]: DEBUG nova.objects.instance [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lazy-loading 'resources' on Instance uuid 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1386.219572] env[69927]: DEBUG oslo_concurrency.lockutils [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] Releasing lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1386.219836] env[69927]: DEBUG nova.compute.manager [req-6482326c-09b4-49f2-b95f-4449e7519aa0 req-160695d2-bb8d-4354-a852-f41ba5c9e2a2 service nova] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Received event network-vif-deleted-cdd401dc-1a64-40f2-931e-f19611bb56d3 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1386.377039] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52bf7b6b-008f-c444-bd60-3ded1fd54983, 'name': SearchDatastore_Task, 
'duration_secs': 0.008885} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.377851] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de82414b-dd89-439f-880d-6d5a7d8bfc90 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.383967] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1386.383967] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5272d539-d796-8354-e20a-9d7b6f03dbec" [ 1386.383967] env[69927]: _type = "Task" [ 1386.383967] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.392584] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5272d539-d796-8354-e20a-9d7b6f03dbec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.507282] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.550854] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd561c7-9c28-468f-8d20-dae4f9b16fe7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.558225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66244dc-a99e-4c3e-8c81-d3b924db689a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.587526] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48faf782-ec07-4c8f-a72d-8883d9051d82 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.595073] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d173deb-d3fe-413b-a01b-6d911ce9b2bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.607991] env[69927]: DEBUG nova.compute.provider_tree [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.894596] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5272d539-d796-8354-e20a-9d7b6f03dbec, 'name': SearchDatastore_Task, 'duration_secs': 0.00997} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.894927] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1386.895144] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1386.895404] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56308420-d416-404e-bcd1-ad3656e41200 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.901691] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1386.901691] env[69927]: value = "task-4097048" [ 1386.901691] env[69927]: _type = "Task" [ 1386.901691] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.909211] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.111487] env[69927]: DEBUG nova.scheduler.client.report [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1387.411430] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446182} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.411700] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1387.411963] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1387.412246] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad66beb1-e928-408e-b01a-b5f1eb2f1ba0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.419041] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1387.419041] env[69927]: value = "task-4097049" [ 1387.419041] env[69927]: _type = "Task" [ 1387.419041] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.428797] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097049, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.507670] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.507912] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.507912] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.508082] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1387.508240] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.617134] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.639228] env[69927]: INFO nova.scheduler.client.report [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Deleted allocations for instance 6bfafa83-a9e2-4f7d-bbad-6b356f173b68 [ 1387.929193] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097049, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073269} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.929591] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1387.930516] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90d815c-a400-4700-a093-6d31596d74c5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.953103] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1387.953745] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ba72ace-3038-44ae-a213-8edf1cdc6b9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.973595] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1387.973595] env[69927]: value = "task-4097050" [ 1387.973595] env[69927]: _type = "Task" [ 1387.973595] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.981621] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097050, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.010877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.011162] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1388.011337] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.011496] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1388.012420] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392bb420-3c8e-44fe-90e1-d1a10b4bd342 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.020230] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98456f62-fc4d-43aa-804c-0a7dd6f54e9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.034139] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3c8e14-2187-4d88-80da-d317a91e14c7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.040896] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21afe67d-dd91-4750-ad49-10986d7cfe8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.071152] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180201MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1388.071390] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.071503] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1388.147316] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5bcf54a3-fc57-4799-97ae-a3b4408c8388 tempest-ServerActionsTestOtherB-54229569 tempest-ServerActionsTestOtherB-54229569-project-member] Lock "6bfafa83-a9e2-4f7d-bbad-6b356f173b68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.735s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.485371] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097050, 'name': ReconfigVM_Task, 'duration_secs': 0.3428} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.485693] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1388.486596] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26ac6a5d-f0a6-48d0-a153-bd05beba637e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.494343] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1388.494343] env[69927]: value = "task-4097051" [ 1388.494343] env[69927]: _type = "Task" [ 1388.494343] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.503679] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097051, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.004596] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097051, 'name': Rename_Task, 'duration_secs': 0.154703} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.004948] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1389.005131] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d436c70b-a645-49f0-9a21-c85f62348dd9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.011403] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1389.011403] env[69927]: value = "task-4097053" [ 1389.011403] env[69927]: _type = "Task" [ 1389.011403] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.018707] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097053, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.097622] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance da468d11-82a4-4fec-b06a-1b522bacdbc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1389.097789] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1389.097948] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 9e9e93cc-e225-4ec7-850f-916aa078ba30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1389.098106] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7a1f95d9-892e-492d-acbe-d70b56c36698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1389.098307] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1389.098448] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1389.160440] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1b690d-e778-46f4-a611-e6614cd22a9a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.168378] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4392b237-d534-4d12-a58c-b4e4e9aac6c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.198608] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c4fcab-a44b-4bb4-bc2d-a08690cfbb0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.206442] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20ce443-9eaf-44e4-a6d1-a46542f84ec4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.221056] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.525742] env[69927]: DEBUG oslo_vmware.api [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097053, 'name': PowerOnVM_Task, 'duration_secs': 0.451952} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.526229] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1389.526600] env[69927]: INFO nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Took 6.76 seconds to spawn the instance on the hypervisor. 
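The records up to this point trace the VMDK-backed spawn of instance 7a1f95d9-892e-492d-acbe-d70b56c36698 end to end: SearchDatastore_Task locates the cached image, CopyVirtualDisk_Task copies it into the instance directory, ExtendVirtualDisk_Task grows the root disk, ReconfigVM_Task attaches it, Rename_Task renames the VM, and PowerOnVM_Task powers it on, with each vCenter task polled through oslo.vmware until it reports success. Below is a minimal sketch of that invoke/wait polling pattern (roughly what vm_util.power_on_instance does through the shared session), not the driver's actual code: the vCenter hostname and credentials are placeholders, the session keyword names are as I recall them from oslo.vmware and may differ between releases, and the instance UUID is reused from this log purely for illustration.

```python
# Sketch only: drive a vCenter task the way the records above show, using
# oslo.vmware's session + wait_for_task pattern. Hostname and credentials
# are placeholders; keyword names assumed from oslo.vmware, verify against
# the installed release.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.example.test',             # vCenter endpoint (placeholder)
    'administrator@vsphere.local',  # placeholder credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Resolve the VM by its Nova instance UUID, as SearchIndex.FindAllByUuid
# does later in this log.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='7a1f95d9-892e-492d-acbe-d70b56c36698',
    vmSearch=True, instanceUuid=True)

if vm_refs:
    # Start the task (PowerOnVM_Task above) and block until vCenter reports
    # completion; wait_for_task polls the task and re-raises any task error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
    session.wait_for_task(task)
```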
[ 1389.526925] env[69927]: DEBUG nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1389.528157] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572cfa54-3a2f-40a7-8264-ec18dec4b2ae {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.724039] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1390.048679] env[69927]: INFO nova.compute.manager [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Took 11.47 seconds to build instance. [ 1390.229052] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1390.229309] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.158s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1390.550189] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2481f842-67cb-4762-8d39-1760d9b47740 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.981s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1390.786063] env[69927]: DEBUG nova.compute.manager [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Received event network-changed-6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1390.786787] env[69927]: DEBUG nova.compute.manager [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Refreshing instance network info cache due to event network-changed-6b4007d2-3eea-417e-b36d-28ced978b73f. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1390.786787] env[69927]: DEBUG oslo_concurrency.lockutils [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] Acquiring lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.786787] env[69927]: DEBUG oslo_concurrency.lockutils [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] Acquired lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1390.786787] env[69927]: DEBUG nova.network.neutron [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Refreshing network info cache for port 6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1391.499440] env[69927]: DEBUG nova.network.neutron [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updated VIF entry in instance network info cache for port 6b4007d2-3eea-417e-b36d-28ced978b73f. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1391.499832] env[69927]: DEBUG nova.network.neutron [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updating instance_info_cache with network_info: [{"id": "6b4007d2-3eea-417e-b36d-28ced978b73f", "address": "fa:16:3e:e1:c9:1d", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b4007d2-3e", "ovs_interfaceid": "6b4007d2-3eea-417e-b36d-28ced978b73f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.002631] env[69927]: DEBUG oslo_concurrency.lockutils [req-a9669c0d-aa97-408a-8a93-a5a8e9fcc89f req-93aa91f3-9c56-4b30-8dc7-4849ebd8a01f service nova] Releasing lock "refresh_cache-7a1f95d9-892e-492d-acbe-d70b56c36698" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1392.229332] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic 
task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.625592] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.625925] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.800349] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "9e9e93cc-e225-4ec7-850f-916aa078ba30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.800627] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.800973] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "9e9e93cc-e225-4ec7-850f-916aa078ba30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.801212] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.801393] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.803776] env[69927]: INFO nova.compute.manager [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 
tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Terminating instance [ 1397.130127] env[69927]: DEBUG nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1397.307499] env[69927]: DEBUG nova.compute.manager [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1397.307743] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.308634] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc20f68c-71c8-4086-bfc2-4639695ba6db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.317414] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1397.317725] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5a478e9-cd50-4d8d-98b7-0f2af420bb35 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.325541] env[69927]: DEBUG oslo_vmware.api [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1397.325541] env[69927]: value = "task-4097054" [ 1397.325541] env[69927]: _type = "Task" [ 1397.325541] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.335648] env[69927]: DEBUG oslo_vmware.api [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097054, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.654920] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1397.655240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1397.657161] env[69927]: INFO nova.compute.claims [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.836064] env[69927]: DEBUG oslo_vmware.api [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097054, 'name': PowerOffVM_Task, 'duration_secs': 0.200171} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.836308] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.836481] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1397.836752] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abf1ddfb-242a-44a5-82eb-c17864f8e2ce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.906975] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1397.907402] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1397.907677] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 
tempest-ServerActionsTestJSON-2022006223-project-member] Deleting the datastore file [datastore2] 9e9e93cc-e225-4ec7-850f-916aa078ba30 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1397.907957] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-381d79a1-8f21-4bd5-8656-56b006547c7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.915330] env[69927]: DEBUG oslo_vmware.api [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1397.915330] env[69927]: value = "task-4097056" [ 1397.915330] env[69927]: _type = "Task" [ 1397.915330] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.924156] env[69927]: DEBUG oslo_vmware.api [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097056, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.425711] env[69927]: DEBUG oslo_vmware.api [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140093} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.425998] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1398.426124] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1398.426312] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1398.426543] env[69927]: INFO nova.compute.manager [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1398.426823] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1398.426981] env[69927]: DEBUG nova.compute.manager [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1398.427088] env[69927]: DEBUG nova.network.neutron [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1398.752329] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a59d432-7c57-4c2c-b835-e653fd00e3c1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.760293] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf38dd4-5c57-4ed9-af0d-f3ea9556d73c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.791836] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281b46e5-aa2e-45be-9328-15d3fac72a64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.800384] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db50bc8-0efc-42c7-89e3-4b568046ed3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.814825] env[69927]: DEBUG nova.compute.provider_tree [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.096073] env[69927]: DEBUG nova.compute.manager [req-1492b6aa-d76e-48ac-b89f-d2d15f24ac6d req-9762ae02-7ea8-4479-84d2-8ab453132441 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Received event network-vif-deleted-212263f4-2ce1-4c5f-a203-887ea0b690a0 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1399.096295] env[69927]: INFO nova.compute.manager [req-1492b6aa-d76e-48ac-b89f-d2d15f24ac6d req-9762ae02-7ea8-4479-84d2-8ab453132441 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Neutron deleted interface 212263f4-2ce1-4c5f-a203-887ea0b690a0; detaching it from the instance and deleting it from the info cache [ 1399.096474] env[69927]: DEBUG nova.network.neutron [req-1492b6aa-d76e-48ac-b89f-d2d15f24ac6d req-9762ae02-7ea8-4479-84d2-8ab453132441 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.318108] env[69927]: DEBUG nova.scheduler.client.report [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1399.581561] env[69927]: DEBUG nova.network.neutron [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.600347] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13ee605b-54c7-40df-83a6-7ada3b7d082b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.609910] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16fe871-3635-48ce-a78d-2994042a0800 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.636847] env[69927]: DEBUG nova.compute.manager [req-1492b6aa-d76e-48ac-b89f-d2d15f24ac6d req-9762ae02-7ea8-4479-84d2-8ab453132441 service nova] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Detach interface failed, port_id=212263f4-2ce1-4c5f-a203-887ea0b690a0, reason: Instance 9e9e93cc-e225-4ec7-850f-916aa078ba30 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1399.824221] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1399.824729] env[69927]: DEBUG nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1400.084865] env[69927]: INFO nova.compute.manager [-] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Took 1.66 seconds to deallocate network for instance. [ 1400.330237] env[69927]: DEBUG nova.compute.utils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1400.332133] env[69927]: DEBUG nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1400.332331] env[69927]: DEBUG nova.network.neutron [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1400.376423] env[69927]: DEBUG nova.policy [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1c6d9294a48433baeb1f60ff3c3a6b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1b36d5fd7594ac6b592b03a819b3ab9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1400.593153] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1400.593153] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.593153] env[69927]: DEBUG nova.objects.instance [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'resources' on Instance uuid 9e9e93cc-e225-4ec7-850f-916aa078ba30 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1400.644853] env[69927]: DEBUG nova.network.neutron [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Successfully created port: 2f031d13-6ba5-4d47-a0af-c37c122a67df {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1400.835482] env[69927]: DEBUG nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1401.170499] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887b2b2e-cd6c-48f3-957f-e6fb4943dad1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.178724] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edece633-fe4d-439e-81ac-ff330b54f3a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.208464] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5c955c-9aae-4194-811c-79f3817322bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.216598] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7c8428-09b2-4f96-b803-381ddd4ca8c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.230162] env[69927]: DEBUG nova.compute.provider_tree [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.733489] env[69927]: DEBUG nova.scheduler.client.report [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1401.844540] env[69927]: DEBUG nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1401.878187] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1401.878457] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1401.878614] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1401.878797] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1401.879041] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1401.879150] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1401.879362] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1401.879525] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1401.879693] env[69927]: DEBUG 
nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1401.879856] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1401.880036] env[69927]: DEBUG nova.virt.hardware [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1401.881540] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05482aa-fa87-4ed0-b956-d1c3151242e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.889658] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d895715-96e9-437c-973a-70456d866758 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.024364] env[69927]: DEBUG nova.compute.manager [req-e30664da-0f25-4385-a98d-1c6e2a231ebe req-4823ef1f-9647-401b-980b-2964d4c4edad service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Received event network-vif-plugged-2f031d13-6ba5-4d47-a0af-c37c122a67df {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1402.024584] env[69927]: DEBUG oslo_concurrency.lockutils [req-e30664da-0f25-4385-a98d-1c6e2a231ebe req-4823ef1f-9647-401b-980b-2964d4c4edad service nova] Acquiring lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.024790] env[69927]: DEBUG oslo_concurrency.lockutils [req-e30664da-0f25-4385-a98d-1c6e2a231ebe req-4823ef1f-9647-401b-980b-2964d4c4edad service nova] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.024963] env[69927]: DEBUG oslo_concurrency.lockutils [req-e30664da-0f25-4385-a98d-1c6e2a231ebe req-4823ef1f-9647-401b-980b-2964d4c4edad service nova] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1402.025142] env[69927]: DEBUG nova.compute.manager [req-e30664da-0f25-4385-a98d-1c6e2a231ebe req-4823ef1f-9647-401b-980b-2964d4c4edad service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] No waiting events found dispatching network-vif-plugged-2f031d13-6ba5-4d47-a0af-c37c122a67df {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1402.025379] env[69927]: WARNING nova.compute.manager 
[req-e30664da-0f25-4385-a98d-1c6e2a231ebe req-4823ef1f-9647-401b-980b-2964d4c4edad service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Received unexpected event network-vif-plugged-2f031d13-6ba5-4d47-a0af-c37c122a67df for instance with vm_state building and task_state spawning. [ 1402.110657] env[69927]: DEBUG nova.network.neutron [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Successfully updated port: 2f031d13-6ba5-4d47-a0af-c37c122a67df {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1402.238298] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.646s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1402.257687] env[69927]: INFO nova.scheduler.client.report [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted allocations for instance 9e9e93cc-e225-4ec7-850f-916aa078ba30 [ 1402.613493] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "refresh_cache-f4512f10-36bf-4277-acb7-e09a2a0d2a37" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.613683] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquired lock "refresh_cache-f4512f10-36bf-4277-acb7-e09a2a0d2a37" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.613800] env[69927]: DEBUG nova.network.neutron [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1402.765773] env[69927]: DEBUG oslo_concurrency.lockutils [None req-738f0c8c-dd12-4023-8069-71dbc4a58fe4 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "9e9e93cc-e225-4ec7-850f-916aa078ba30" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.965s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.146746] env[69927]: DEBUG nova.network.neutron [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1403.278747] env[69927]: DEBUG nova.network.neutron [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Updating instance_info_cache with network_info: [{"id": "2f031d13-6ba5-4d47-a0af-c37c122a67df", "address": "fa:16:3e:ae:c3:6e", "network": {"id": "8a6a1fe2-3bf8-4274-a269-182c9122763b", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1021745150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1b36d5fd7594ac6b592b03a819b3ab9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f031d13-6b", "ovs_interfaceid": "2f031d13-6ba5-4d47-a0af-c37c122a67df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.781609] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Releasing lock "refresh_cache-f4512f10-36bf-4277-acb7-e09a2a0d2a37" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1403.781954] env[69927]: DEBUG nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Instance network_info: |[{"id": "2f031d13-6ba5-4d47-a0af-c37c122a67df", "address": "fa:16:3e:ae:c3:6e", "network": {"id": "8a6a1fe2-3bf8-4274-a269-182c9122763b", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1021745150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1b36d5fd7594ac6b592b03a819b3ab9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f031d13-6b", "ovs_interfaceid": "2f031d13-6ba5-4d47-a0af-c37c122a67df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1403.782460] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:c3:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '42f08482-a1da-405d-9918-d733d9f5173c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f031d13-6ba5-4d47-a0af-c37c122a67df', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1403.790710] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Creating folder: Project (f1b36d5fd7594ac6b592b03a819b3ab9). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1403.791043] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bccd8158-5d31-42bd-bb81-c15fcfa83f2a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.804261] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Created folder: Project (f1b36d5fd7594ac6b592b03a819b3ab9) in parent group-v811283. [ 1403.804452] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Creating folder: Instances. Parent ref: group-v811618. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1403.804710] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9aa76631-3a5e-4db9-9f99-21661f28a3e0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.815195] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Created folder: Instances in parent group-v811618. [ 1403.815452] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1403.815647] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1403.815855] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c44900e-ede6-4452-a0ac-e93f75ba8de5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.835351] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1403.835351] env[69927]: value = "task-4097059" [ 1403.835351] env[69927]: _type = "Task" [ 1403.835351] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.843851] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097059, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.054277] env[69927]: DEBUG nova.compute.manager [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Received event network-changed-2f031d13-6ba5-4d47-a0af-c37c122a67df {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1404.054522] env[69927]: DEBUG nova.compute.manager [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Refreshing instance network info cache due to event network-changed-2f031d13-6ba5-4d47-a0af-c37c122a67df. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1404.054787] env[69927]: DEBUG oslo_concurrency.lockutils [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] Acquiring lock "refresh_cache-f4512f10-36bf-4277-acb7-e09a2a0d2a37" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.054944] env[69927]: DEBUG oslo_concurrency.lockutils [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] Acquired lock "refresh_cache-f4512f10-36bf-4277-acb7-e09a2a0d2a37" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.055205] env[69927]: DEBUG nova.network.neutron [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Refreshing network info cache for port 2f031d13-6ba5-4d47-a0af-c37c122a67df {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.345103] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097059, 'name': CreateVM_Task, 'duration_secs': 0.318586} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.345495] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1404.345981] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.346167] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.346494] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1404.346745] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee7aa30b-1b2e-40e4-abb6-e04b81cfa589 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.351241] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1404.351241] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52666d90-7c90-0b18-7a1e-631176f5b040" [ 1404.351241] env[69927]: _type = "Task" [ 1404.351241] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.359137] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52666d90-7c90-0b18-7a1e-631176f5b040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.736657] env[69927]: DEBUG nova.compute.manager [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1404.769790] env[69927]: DEBUG nova.network.neutron [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Updated VIF entry in instance network info cache for port 2f031d13-6ba5-4d47-a0af-c37c122a67df. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.770167] env[69927]: DEBUG nova.network.neutron [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Updating instance_info_cache with network_info: [{"id": "2f031d13-6ba5-4d47-a0af-c37c122a67df", "address": "fa:16:3e:ae:c3:6e", "network": {"id": "8a6a1fe2-3bf8-4274-a269-182c9122763b", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1021745150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1b36d5fd7594ac6b592b03a819b3ab9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f031d13-6b", "ovs_interfaceid": "2f031d13-6ba5-4d47-a0af-c37c122a67df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.861781] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52666d90-7c90-0b18-7a1e-631176f5b040, 'name': SearchDatastore_Task, 'duration_secs': 0.009755} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.862084] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1404.862364] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.862605] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.862753] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.862929] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.863213] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1771ff7-0129-4322-a6eb-9fbb66951fc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.872137] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.872339] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.873047] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd0e6dec-62c7-4579-b155-d66a4f4eef4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.878459] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1404.878459] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520c2a78-44f7-6d24-bf99-cb0fea701b8a" [ 1404.878459] env[69927]: _type = "Task" [ 1404.878459] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.886229] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520c2a78-44f7-6d24-bf99-cb0fea701b8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.257192] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1405.257473] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1405.272522] env[69927]: DEBUG oslo_concurrency.lockutils [req-ae3605cf-6232-49db-b359-afec467d5945 req-33b25c0a-a292-4856-b9e8-7ae6e0a072e6 service nova] Releasing lock "refresh_cache-f4512f10-36bf-4277-acb7-e09a2a0d2a37" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.388695] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]520c2a78-44f7-6d24-bf99-cb0fea701b8a, 'name': SearchDatastore_Task, 'duration_secs': 0.009221} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.389513] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48dcb2a5-7e9a-4640-8b01-20fbe3c87edf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.394892] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1405.394892] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526304ee-9c78-808d-44ef-d9c8f102fff7" [ 1405.394892] env[69927]: _type = "Task" [ 1405.394892] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.403398] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526304ee-9c78-808d-44ef-d9c8f102fff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.764070] env[69927]: INFO nova.compute.claims [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1405.906536] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]526304ee-9c78-808d-44ef-d9c8f102fff7, 'name': SearchDatastore_Task, 'duration_secs': 0.009618} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.906804] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.907080] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] f4512f10-36bf-4277-acb7-e09a2a0d2a37/f4512f10-36bf-4277-acb7-e09a2a0d2a37.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1405.907341] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-419ba980-a51e-482a-b48f-31a792721647 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.914449] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1405.914449] env[69927]: value = "task-4097060" [ 1405.914449] env[69927]: _type = "Task" [ 1405.914449] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.924263] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097060, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.269405] env[69927]: INFO nova.compute.resource_tracker [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating resource usage from migration a7213b65-817c-49e6-be6b-58b66429ffb9 [ 1406.351333] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cb1358-8ded-45dc-994d-94f374d2a663 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.360122] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fcb083-0354-476b-a8c9-fb174df7a842 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.391092] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe48e94-1771-46f2-ab26-f2416e8c740a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.399321] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105dcdb3-5bdd-43ed-8cbe-2ae2acb8c2b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.414030] env[69927]: DEBUG nova.compute.provider_tree [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.423788] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487148} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.424687] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] f4512f10-36bf-4277-acb7-e09a2a0d2a37/f4512f10-36bf-4277-acb7-e09a2a0d2a37.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1406.424881] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1406.425138] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-133b6380-79fd-4d2f-891c-1280f9b808ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.432786] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1406.432786] env[69927]: value = "task-4097061" [ 1406.432786] env[69927]: _type = "Task" [ 1406.432786] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.443244] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097061, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.919985] env[69927]: DEBUG nova.scheduler.client.report [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1406.944118] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069193} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.944350] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1406.945142] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257d5a76-949a-47cf-bc73-f2a17d170915 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.968530] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] f4512f10-36bf-4277-acb7-e09a2a0d2a37/f4512f10-36bf-4277-acb7-e09a2a0d2a37.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1406.968830] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b344e9a6-72c3-4fb8-9421-271eba00306f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.990161] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1406.990161] env[69927]: value = "task-4097062" [ 1406.990161] env[69927]: _type = "Task" [ 1406.990161] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.998958] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097062, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.424899] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.167s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.425315] env[69927]: INFO nova.compute.manager [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Migrating [ 1407.500289] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097062, 'name': ReconfigVM_Task, 'duration_secs': 0.346815} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.500467] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Reconfigured VM instance instance-0000007b to attach disk [datastore2] f4512f10-36bf-4277-acb7-e09a2a0d2a37/f4512f10-36bf-4277-acb7-e09a2a0d2a37.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1407.501086] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24d5c00d-70d5-4601-9239-40c76ec2927e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.507251] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1407.507251] env[69927]: value = "task-4097063" [ 1407.507251] env[69927]: _type = "Task" [ 1407.507251] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.515243] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097063, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.940033] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.940278] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1407.940476] env[69927]: DEBUG nova.network.neutron [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1408.017538] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097063, 'name': Rename_Task, 'duration_secs': 0.149056} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.017812] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1408.018063] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90c6a5e1-97ea-4ea0-8e84-46807d153652 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.025035] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1408.025035] env[69927]: value = "task-4097064" [ 1408.025035] env[69927]: _type = "Task" [ 1408.025035] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.032680] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.536104] env[69927]: DEBUG oslo_vmware.api [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097064, 'name': PowerOnVM_Task, 'duration_secs': 0.448025} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.536560] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.536636] env[69927]: INFO nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Took 6.69 seconds to spawn the instance on the hypervisor. 
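Annotation: the spawn recorded above is driven by a chain of vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each awaited through the same wait_for_task/_poll_task pattern that produces the repeated "progress is N%" and "completed successfully" lines. The snippet below is only a minimal, self-contained sketch of that polling loop; TaskInfo and fetch_task_info are hypothetical stand-ins introduced for illustration and are not the oslo.vmware API.

# Minimal sketch of the "wait for a vCenter task" polling pattern seen above.
# `fetch_task_info` is a hypothetical callable standing in for the property
# query oslo.vmware performs each poll; this is not oslo.vmware's interface.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0     # percent complete, as logged ("progress is 0%")
    error: str | None = None

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state, mirroring the
    'Task: {...} progress is N%' / 'completed successfully' log lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress}%")   # analogous to the DEBUG lines
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

# Simulated usage: a fake task that finishes on the third poll.
if __name__ == "__main__":
    states = iter([TaskInfo('queued'),
                   TaskInfo('running', 40),
                   TaskInfo('success', 100)])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))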
[ 1408.536759] env[69927]: DEBUG nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1408.537560] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36408946-9ac8-418d-8a86-7a475e69403a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.651946] env[69927]: DEBUG nova.network.neutron [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.057854] env[69927]: INFO nova.compute.manager [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Took 11.42 seconds to build instance. 
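Annotation: earlier in this build (the SearchDatastore_Task / CopyVirtualDisk_Task sequence), the driver serialises access to the datastore image cache by holding a lock named after the cached image ("[datastore2] devstack-image-cache_base/f524494e-...") while it checks for the cached VMDK and copies it into the instance folder. The sketch below illustrates that lock-guarded fetch-if-missing pattern under stated assumptions: lockutils.lock() is the real oslo.concurrency primitive behind the Acquiring/Releasing lock lines, while image_in_cache, download_image_to_cache and copy_disk are hypothetical placeholders, and the lock name format is simplified.

# Sketch of the lock-guarded image-cache reuse seen in the spawn above.
# Only lockutils.lock() is the real oslo.concurrency call; the helpers and
# paths are illustrative placeholders, not Nova's implementation.
from oslo_concurrency import lockutils

def image_in_cache(datastore, image_id):
    # Placeholder for the SearchDatastore_Task existence check.
    return False

def download_image_to_cache(datastore, image_id):
    # Placeholder for fetching the image into the cache folder.
    pass

def copy_disk(src, dst):
    # Placeholder for CopyVirtualDisk_Task.
    print(f"copying {src} -> {dst}")

def ensure_instance_disk(datastore, image_id, instance_uuid):
    cache_path = (f"[{datastore}] devstack-image-cache_base/"
                  f"{image_id}/{image_id}.vmdk")
    inst_path = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    # One lock per cached image: concurrent spawns of the same image are
    # serialised, while different images proceed in parallel.
    with lockutils.lock(f"datastore-image-cache-{image_id}"):
        if not image_in_cache(datastore, image_id):
            download_image_to_cache(datastore, image_id)
        copy_disk(cache_path, inst_path)
    return inst_path

ensure_instance_disk("datastore2",
                     "f524494e-9179-4b3e-a3e2-782f019def24",
                     "f4512f10-36bf-4277-acb7-e09a2a0d2a37")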
[ 1409.154613] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1409.560617] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c789e608-78eb-44ee-ab09-69866b2ad29f tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.935s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.208475] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.208727] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.208947] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.209160] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.209342] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.211398] env[69927]: INFO nova.compute.manager [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Terminating instance [ 1410.669599] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-88de3bd3-7c9f-4546-afbf-f7cc254e3ac8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.688171] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance 'da468d11-82a4-4fec-b06a-1b522bacdbc2' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1410.714504] env[69927]: DEBUG nova.compute.manager [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1410.714736] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1410.715576] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c7ee18-96f3-4572-b826-f6f6939b22eb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.722968] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1410.723228] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddd07394-7a94-4b61-b9a4-9d5a244d7034 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.729676] env[69927]: DEBUG oslo_vmware.api [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1410.729676] env[69927]: value = "task-4097065" [ 1410.729676] env[69927]: _type = "Task" [ 1410.729676] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.738020] env[69927]: DEBUG oslo_vmware.api [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097065, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.194705] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1411.194929] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ceb8e97-79b7-428b-aaab-e87b487762d7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.202655] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1411.202655] env[69927]: value = "task-4097066" [ 1411.202655] env[69927]: _type = "Task" [ 1411.202655] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.211328] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097066, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.239804] env[69927]: DEBUG oslo_vmware.api [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097065, 'name': PowerOffVM_Task, 'duration_secs': 0.191207} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.240133] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1411.240330] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1411.240611] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-703c8afb-f766-45db-9b43-cc8f2b9355e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.324064] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1411.324325] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1411.324485] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Deleting the datastore file [datastore2] f4512f10-36bf-4277-acb7-e09a2a0d2a37 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1411.324724] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a52def9-cc85-49af-9170-f90ccc748e64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.332874] env[69927]: DEBUG oslo_vmware.api [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for the task: (returnval){ [ 1411.332874] env[69927]: value = "task-4097068" [ 1411.332874] env[69927]: _type = "Task" [ 1411.332874] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.342038] env[69927]: DEBUG oslo_vmware.api [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097068, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.714438] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097066, 'name': PowerOffVM_Task, 'duration_secs': 0.242306} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.714808] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1411.714857] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance 'da468d11-82a4-4fec-b06a-1b522bacdbc2' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1411.843494] env[69927]: DEBUG oslo_vmware.api [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Task: {'id': task-4097068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171353} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.843757] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1411.843927] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1411.844116] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1411.844303] env[69927]: INFO nova.compute.manager [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1411.844527] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1411.844707] env[69927]: DEBUG nova.compute.manager [-] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1411.844799] env[69927]: DEBUG nova.network.neutron [-] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1412.099518] env[69927]: DEBUG nova.compute.manager [req-3e972db5-1dcc-4b4d-aa1c-1acb4002305d req-a78fac9b-00ea-4bfa-b58d-21cb330fa8df service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Received event network-vif-deleted-2f031d13-6ba5-4d47-a0af-c37c122a67df {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1412.099725] env[69927]: INFO nova.compute.manager [req-3e972db5-1dcc-4b4d-aa1c-1acb4002305d req-a78fac9b-00ea-4bfa-b58d-21cb330fa8df service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Neutron deleted interface 2f031d13-6ba5-4d47-a0af-c37c122a67df; detaching it from the instance and deleting it from the info cache [ 1412.099905] env[69927]: DEBUG nova.network.neutron [req-3e972db5-1dcc-4b4d-aa1c-1acb4002305d req-a78fac9b-00ea-4bfa-b58d-21cb330fa8df service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.221076] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1412.221323] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.221484] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1412.221666] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.221814] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 
tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1412.221962] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1412.222237] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1412.222373] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1412.222542] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1412.222704] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1412.222894] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1412.228585] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe726bad-557d-4bce-8189-8374e6fa3bdb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.245354] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1412.245354] env[69927]: value = "task-4097069" [ 1412.245354] env[69927]: _type = "Task" [ 1412.245354] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.253422] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097069, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.582157] env[69927]: DEBUG nova.network.neutron [-] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.603311] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-213afec1-617f-40b9-ae16-817932ba6021 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.614367] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9235f8b0-d1e1-4bd1-a8dc-9b561d4248d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.641528] env[69927]: DEBUG nova.compute.manager [req-3e972db5-1dcc-4b4d-aa1c-1acb4002305d req-a78fac9b-00ea-4bfa-b58d-21cb330fa8df service nova] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Detach interface failed, port_id=2f031d13-6ba5-4d47-a0af-c37c122a67df, reason: Instance f4512f10-36bf-4277-acb7-e09a2a0d2a37 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1412.755785] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097069, 'name': ReconfigVM_Task, 'duration_secs': 0.165372} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.756220] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance 'da468d11-82a4-4fec-b06a-1b522bacdbc2' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1413.084720] env[69927]: INFO nova.compute.manager [-] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Took 1.24 seconds to deallocate network for instance. 
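The records above trace the full teardown of instance f4512f10-36bf-4277-acb7-e09a2a0d2a37: power off, unregister the VM, delete its directory from datastore2, then deallocate its Neutron ports before the resource tracker releases the allocation. A minimal Python sketch of that ordering is given below; the helper names (power_off, unregister, delete_directory, deallocate_for_instance) are hypothetical stand-ins chosen to mirror the log messages, not the real nova.virt.vmwareapi API.

```python
# Illustrative sketch only: the vm/datastore/network helpers are hypothetical
# stand-ins for the driver calls the log records show, not actual Nova code.
import logging

LOG = logging.getLogger(__name__)


def destroy_instance(vm, datastore, network):
    """Tear down a VM in the order the log above shows."""
    vm.power_off()                            # PowerOffVM_Task
    LOG.debug("Powered off the VM")

    vm.unregister()                           # VirtualMachine.UnregisterVM
    LOG.debug("Unregistered the VM")

    datastore.delete_directory(vm.uuid)       # FileManager.DeleteDatastoreFile_Task
    LOG.debug("Deleted contents of the VM from datastore %s", datastore.name)

    network.deallocate_for_instance(vm.uuid)  # Neutron port cleanup
    LOG.debug("Deallocated network for instance")
```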
[ 1413.263995] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1413.264268] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1413.264429] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1413.264653] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1413.264852] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1413.265045] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1413.265308] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1413.265518] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1413.265732] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
1413.265929] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1413.266167] env[69927]: DEBUG nova.virt.hardware [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1413.272279] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1413.272620] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4e1f65e-5339-483f-a55b-5653b915f3dc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.293499] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1413.293499] env[69927]: value = "task-4097070" [ 1413.293499] env[69927]: _type = "Task" [ 1413.293499] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.304197] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097070, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.590551] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.590854] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.591137] env[69927]: DEBUG nova.objects.instance [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lazy-loading 'resources' on Instance uuid f4512f10-36bf-4277-acb7-e09a2a0d2a37 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1413.804528] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097070, 'name': ReconfigVM_Task, 'duration_secs': 0.158936} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.804917] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1413.805664] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5aa2e0f-f480-44f9-b4a7-0331495ec538 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.828150] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] da468d11-82a4-4fec-b06a-1b522bacdbc2/da468d11-82a4-4fec-b06a-1b522bacdbc2.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1413.828402] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d7688c2-00fe-4151-a8aa-40d4f9e61fb2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.847588] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1413.847588] env[69927]: value = "task-4097071" [ 1413.847588] env[69927]: _type = "Task" [ 1413.847588] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.856129] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097071, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.169753] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4917336-82e5-48a1-922f-fd85d5e56c2a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.177951] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5c2b28-606e-479e-8787-8b78aaa516fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.208289] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff275cd-951c-4e67-9ddc-2d81b179415c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.216566] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72aa17b-bcc5-4248-907a-7a18b55996bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.232385] env[69927]: DEBUG nova.compute.provider_tree [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1414.357495] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097071, 'name': ReconfigVM_Task, 'duration_secs': 0.274977} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.357739] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Reconfigured VM instance instance-00000066 to attach disk [datastore2] da468d11-82a4-4fec-b06a-1b522bacdbc2/da468d11-82a4-4fec-b06a-1b522bacdbc2.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1414.357993] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance 'da468d11-82a4-4fec-b06a-1b522bacdbc2' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1414.735041] env[69927]: DEBUG nova.scheduler.client.report [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1414.864556] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7bd280-4f08-4dce-ae13-f30b56e34e4b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.884244] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38300424-6152-4d46-886f-5dda50f8923e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.902163] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance 'da468d11-82a4-4fec-b06a-1b522bacdbc2' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1415.240179] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.649s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1415.258191] env[69927]: INFO nova.scheduler.client.report [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Deleted allocations for instance f4512f10-36bf-4277-acb7-e09a2a0d2a37 [ 1415.440458] env[69927]: DEBUG nova.network.neutron [None 
req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Port 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1415.765183] env[69927]: DEBUG oslo_concurrency.lockutils [None req-799de37c-6671-4b63-a21a-01d63b3b69bd tempest-ServerPasswordTestJSON-1224543486 tempest-ServerPasswordTestJSON-1224543486-project-member] Lock "f4512f10-36bf-4277-acb7-e09a2a0d2a37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.556s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1416.462816] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.463223] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1416.463223] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.503360] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.503786] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1417.503786] env[69927]: DEBUG nova.network.neutron [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.214597] env[69927]: DEBUG nova.network.neutron [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] 
[instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.717664] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1419.245681] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79045ceb-da03-4c3c-9148-c88471aea6bb {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.265385] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3f7362-5167-4d27-95b9-59eb90af7fe7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.272636] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance 'da468d11-82a4-4fec-b06a-1b522bacdbc2' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1419.779876] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.780325] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63425692-b66a-40ee-8b81-6d31860e1b5a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.787849] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 
tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1419.787849] env[69927]: value = "task-4097072" [ 1419.787849] env[69927]: _type = "Task" [ 1419.787849] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.796476] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.297760] env[69927]: DEBUG oslo_vmware.api [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097072, 'name': PowerOnVM_Task, 'duration_secs': 0.37226} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.298048] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1420.298242] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-f69648c0-f086-430a-b2c9-72108f41bca9 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance 'da468d11-82a4-4fec-b06a-1b522bacdbc2' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1422.982054] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1422.982454] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1422.982580] env[69927]: DEBUG nova.compute.manager [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Going to confirm migration 8 {{(pid=69927) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1423.544674] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.544876] env[69927]: DEBUG oslo_concurrency.lockutils 
[None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1423.545133] env[69927]: DEBUG nova.network.neutron [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1423.545388] env[69927]: DEBUG nova.objects.instance [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'info_cache' on Instance uuid da468d11-82a4-4fec-b06a-1b522bacdbc2 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1424.768771] env[69927]: DEBUG nova.network.neutron [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [{"id": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "address": "fa:16:3e:ed:aa:67", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e9852f-a4", "ovs_interfaceid": "47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.272192] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-da468d11-82a4-4fec-b06a-1b522bacdbc2" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.272512] env[69927]: DEBUG nova.objects.instance [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'migration_context' on Instance uuid da468d11-82a4-4fec-b06a-1b522bacdbc2 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1425.775951] env[69927]: DEBUG nova.objects.base [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 
tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1425.776953] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f54cb3-0319-4534-a534-d4eec402612a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.796953] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88b27aac-66b9-424c-a44d-5aff45d1b576 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.803192] env[69927]: DEBUG oslo_vmware.api [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1425.803192] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a3119c-36dc-cc74-4e0a-0f716d8488a4" [ 1425.803192] env[69927]: _type = "Task" [ 1425.803192] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.812931] env[69927]: DEBUG oslo_vmware.api [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a3119c-36dc-cc74-4e0a-0f716d8488a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.313506] env[69927]: DEBUG oslo_vmware.api [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52a3119c-36dc-cc74-4e0a-0f716d8488a4, 'name': SearchDatastore_Task, 'duration_secs': 0.010503} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.313829] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.314095] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1426.890167] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176069cd-0a7d-4b59-b66a-933a6b6a9f78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.898373] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7718cac7-d8b4-4990-9f2a-647e637e4a47 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.930157] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc48f93-488a-4f4b-bc39-5ee03aba17e7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.939514] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dd3c55-89e0-4683-81e0-59d985e0a3c4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.954270] env[69927]: DEBUG nova.compute.provider_tree [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.457663] env[69927]: DEBUG nova.scheduler.client.report [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1428.468523] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.154s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1428.824140] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "7a1f95d9-892e-492d-acbe-d70b56c36698" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.824405] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1429.031762] env[69927]: INFO nova.scheduler.client.report [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted allocation for migration a7213b65-817c-49e6-be6b-58b66429ffb9 [ 1429.327368] env[69927]: DEBUG nova.compute.utils [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1429.538878] env[69927]: DEBUG oslo_concurrency.lockutils [None req-25499cc8-1fce-4ace-bf1e-35429ac90736 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.556s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.830098] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.912518] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1429.912774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1429.912982] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d 
tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1429.913191] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1429.913363] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.915522] env[69927]: INFO nova.compute.manager [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Terminating instance [ 1430.419424] env[69927]: DEBUG nova.compute.manager [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1430.419657] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.420903] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950adb0d-dfd6-41a0-be35-5a236c3a71c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.428770] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1430.428992] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f4c85b4-49e9-4a30-a217-0a0e207ddd78 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.436151] env[69927]: DEBUG oslo_vmware.api [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1430.436151] env[69927]: value = "task-4097073" [ 1430.436151] env[69927]: _type = "Task" [ 1430.436151] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.444383] env[69927]: DEBUG oslo_vmware.api [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.893375] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "7a1f95d9-892e-492d-acbe-d70b56c36698" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1430.893766] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1430.893878] env[69927]: INFO nova.compute.manager [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Attaching volume 79cf1a68-dde3-471c-9057-c0d1153bcb3b to /dev/sdb [ 1430.924027] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc0b784-2e09-4b6c-bc4d-cde24618e8ba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.931108] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0321f47e-b974-47c3-bc51-da34ba6f41b4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.948637] env[69927]: DEBUG nova.virt.block_device [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updating existing volume attachment record: a54fe3bd-704f-44b4-b0a9-b75a2182c11c {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1430.954013] env[69927]: DEBUG oslo_vmware.api [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097073, 'name': PowerOffVM_Task, 'duration_secs': 0.202287} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.954280] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1430.954455] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1430.954706] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdc4dea6-dbae-437b-b728-61fc58789b73 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.022454] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1431.022816] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1431.023095] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleting the datastore file [datastore2] da468d11-82a4-4fec-b06a-1b522bacdbc2 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.023382] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e31107c8-ebda-4c89-b949-afc29e345941 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.031885] env[69927]: DEBUG oslo_vmware.api [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1431.031885] env[69927]: value = "task-4097075" [ 1431.031885] env[69927]: _type = "Task" [ 1431.031885] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.040409] env[69927]: DEBUG oslo_vmware.api [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097075, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.542799] env[69927]: DEBUG oslo_vmware.api [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146583} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.543062] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.543257] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.543434] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.543614] env[69927]: INFO nova.compute.manager [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1431.543855] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1431.544109] env[69927]: DEBUG nova.compute.manager [-] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1431.544253] env[69927]: DEBUG nova.network.neutron [-] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1432.004373] env[69927]: DEBUG nova.compute.manager [req-1a7cd2c9-10f8-4539-ba56-b0a92bb966e1 req-9278e23f-06b4-4f15-ba30-5ed33336200d service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Received event network-vif-deleted-47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1432.004661] env[69927]: INFO nova.compute.manager [req-1a7cd2c9-10f8-4539-ba56-b0a92bb966e1 req-9278e23f-06b4-4f15-ba30-5ed33336200d service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Neutron deleted interface 47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07; detaching it from the instance and deleting it from the info cache [ 1432.004764] env[69927]: DEBUG nova.network.neutron [req-1a7cd2c9-10f8-4539-ba56-b0a92bb966e1 req-9278e23f-06b4-4f15-ba30-5ed33336200d service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.486658] env[69927]: DEBUG nova.network.neutron [-] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.507669] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a4bf890-09f0-420d-b800-3913ad4190f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.518083] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a704867d-fbc9-46b8-b3f4-cdebf0cef282 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.544590] env[69927]: DEBUG nova.compute.manager [req-1a7cd2c9-10f8-4539-ba56-b0a92bb966e1 req-9278e23f-06b4-4f15-ba30-5ed33336200d service nova] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Detach interface failed, port_id=47e9852f-a4d7-4ffd-b9aa-6335c2aa2c07, reason: Instance da468d11-82a4-4fec-b06a-1b522bacdbc2 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1432.989638] env[69927]: INFO nova.compute.manager [-] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Took 1.45 seconds to deallocate network for instance. 
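The records around this point repeatedly show oslo.vmware's wait_for_task/_poll_task cycle: a task moniker such as task-4097075 is returned, progress is logged at intervals, and the wait returns once the task reports success. Below is a minimal, stdlib-only sketch of that poll-until-done pattern; get_task_info and the state names are hypothetical stand-ins, not the oslo.vmware API itself.

```python
# Minimal sketch of the polling behind the "Task: {...} progress is N%"
# and "completed successfully" records above. get_task_info() is a
# hypothetical callable returning (state, progress, error); it stands in
# for the property reads oslo.vmware performs against vCenter.
import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reports success, an error, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = get_task_info()
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"task failed: {error}")
        # Still queued/running: report progress and poll again, mirroring
        # the repeated "progress is N%" lines in this log.
        print(f"task progress is {progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within the timeout")
```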
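The surrounding 'Acquiring lock "<uuid>" by "...do_attach_volume"' and 'Lock ... held N s' records come from named, per-instance locks that serialize operations on one instance. A rough illustration of that pattern follows; do_attach_volume and attach_volume_to_backend are hypothetical stand-ins, while lockutils.lock is the real oslo.concurrency context manager.

```python
# Rough sketch of the per-instance serialization seen in the lock records:
# one caller holds the lock named after the instance UUID while it mutates
# that instance's block devices; others block and log how long they waited.
from oslo_concurrency import lockutils

def do_attach_volume(instance_uuid, volume_id, attach_volume_to_backend):
    # attach_volume_to_backend() is a placeholder for the real driver work.
    with lockutils.lock(instance_uuid):
        return attach_volume_to_backend(instance_uuid, volume_id)
```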
[ 1433.497219] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1433.497514] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1433.497707] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.520076] env[69927]: INFO nova.scheduler.client.report [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted allocations for instance da468d11-82a4-4fec-b06a-1b522bacdbc2 [ 1434.028455] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b1334047-a8ea-4c32-b172-cd6dbc611b3d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "da468d11-82a4-4fec-b06a-1b522bacdbc2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.116s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.852812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "8a907253-623b-456d-8f04-f99fc48ec58a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.853115] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.356228] env[69927]: DEBUG nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1435.496602] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1435.496852] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1435.497764] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e536e9ad-cdab-43a2-9798-575c99c6ff61 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.514251] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f98ca0-0669-465d-abc0-452385afe872 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.538648] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b/volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.538902] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24afd805-986a-40b4-afa8-aa0bec71189f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.556937] env[69927]: DEBUG oslo_vmware.api [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1435.556937] env[69927]: value = "task-4097080" [ 1435.556937] env[69927]: _type = "Task" [ 1435.556937] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.565045] env[69927]: DEBUG oslo_vmware.api [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097080, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.880389] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.880657] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.882264] env[69927]: INFO nova.compute.claims [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1436.066450] env[69927]: DEBUG oslo_vmware.api [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097080, 'name': ReconfigVM_Task, 'duration_secs': 0.344681} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.066726] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b/volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1436.071396] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-978df7be-0c3d-42b4-a3ab-184a534f0030 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.086305] env[69927]: DEBUG oslo_vmware.api [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1436.086305] env[69927]: value = "task-4097081" [ 1436.086305] env[69927]: _type = "Task" [ 1436.086305] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.095078] env[69927]: DEBUG oslo_vmware.api [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097081, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.597940] env[69927]: DEBUG oslo_vmware.api [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097081, 'name': ReconfigVM_Task, 'duration_secs': 0.141384} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.598397] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1436.940710] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93bf774-e1dd-4697-af50-44cd21bb1b9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.948309] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b607cd64-7300-4a56-8b25-defa25381ab0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.978383] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe8c48c-3e05-439b-bdac-66e24a6989b7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.985897] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdcb126-6ccc-429c-90ea-01a8849f5ba8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.998942] env[69927]: DEBUG nova.compute.provider_tree [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1437.502712] env[69927]: DEBUG nova.scheduler.client.report [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1437.637174] env[69927]: DEBUG nova.objects.instance [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'flavor' on Instance uuid 7a1f95d9-892e-492d-acbe-d70b56c36698 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1438.008016] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.127s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.008630] env[69927]: DEBUG nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1438.142347] env[69927]: DEBUG oslo_concurrency.lockutils [None req-cb5387b2-da9a-4b0e-8919-88c771d32d58 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.249s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.513927] env[69927]: DEBUG nova.compute.utils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1438.515486] env[69927]: DEBUG nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1438.515566] env[69927]: DEBUG nova.network.neutron [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1438.553190] env[69927]: DEBUG nova.policy [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d854e5435164764b5b69b9c7262398f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dee421c661394f3fbf8d69a575f095a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1438.572094] env[69927]: INFO nova.compute.manager [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Rebuilding instance [ 1438.612901] env[69927]: DEBUG nova.compute.manager [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1438.613813] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b67e60-a579-458d-98b3-b201677ea35e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.840496] env[69927]: DEBUG nova.network.neutron [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Successfully created port: 58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1439.018917] env[69927]: DEBUG nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Start building block device mappings for instance. 
{{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1439.508016] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.628738] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.629087] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f67f4b60-4c2a-43f8-9b8e-c02fbda4cc7c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.636767] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1439.636767] env[69927]: value = "task-4097082" [ 1439.636767] env[69927]: _type = "Task" [ 1439.636767] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.646900] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097082, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.029028] env[69927]: DEBUG nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1440.056543] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1440.056739] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1440.056901] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1440.057096] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1440.057798] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1440.057798] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1440.057798] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1440.057798] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1440.058017] env[69927]: DEBUG nova.virt.hardware [None 
req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1440.058059] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1440.058249] env[69927]: DEBUG nova.virt.hardware [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1440.059136] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d056b9c2-5e17-447b-827f-61bd7174b552 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.067785] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99d46c4-1ca4-4923-854c-74644d21ca3a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.147622] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097082, 'name': PowerOffVM_Task, 'duration_secs': 0.200345} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.147937] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1440.205041] env[69927]: DEBUG nova.compute.manager [req-f7e47608-9b9a-4853-9234-471fbf044922 req-51bf199c-6b61-4867-8947-149b8baad828 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Received event network-vif-plugged-58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1440.205278] env[69927]: DEBUG oslo_concurrency.lockutils [req-f7e47608-9b9a-4853-9234-471fbf044922 req-51bf199c-6b61-4867-8947-149b8baad828 service nova] Acquiring lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.205486] env[69927]: DEBUG oslo_concurrency.lockutils [req-f7e47608-9b9a-4853-9234-471fbf044922 req-51bf199c-6b61-4867-8947-149b8baad828 service nova] Lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.205655] env[69927]: DEBUG oslo_concurrency.lockutils [req-f7e47608-9b9a-4853-9234-471fbf044922 req-51bf199c-6b61-4867-8947-149b8baad828 service nova] Lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.205821] env[69927]: DEBUG nova.compute.manager [req-f7e47608-9b9a-4853-9234-471fbf044922 req-51bf199c-6b61-4867-8947-149b8baad828 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] No waiting events found dispatching network-vif-plugged-58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1440.206025] env[69927]: WARNING nova.compute.manager [req-f7e47608-9b9a-4853-9234-471fbf044922 req-51bf199c-6b61-4867-8947-149b8baad828 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Received unexpected event network-vif-plugged-58b7f909-34e0-488c-b45d-808eea3e0be8 for instance with vm_state building and task_state spawning. 
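The network-vif-plugged records just above show the compute manager's external-event handshake: a waiter can register for an event keyed by instance and event name, and when Neutron reports the event a registered waiter is woken; with no waiter registered, the "Received unexpected event ..." warning above is logged instead. A simplified, stdlib-only sketch of that registry follows; the class and method names are illustrative, not Nova's actual InstanceEvents implementation.

```python
# Simplified model of the "No waiting events found dispatching ..." /
# "Received unexpected event ..." behaviour seen above. Names illustrative.
import threading

class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}                     # (instance_uuid, tag) -> Event

    def prepare_for(self, instance_uuid, tag):
        """Register interest before starting the operation that triggers the event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, tag)] = ev
        return ev                              # caller later does ev.wait(timeout)

    def dispatch(self, instance_uuid, tag):
        """Deliver an event reported by the external service."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, tag), None)
        if ev is None:
            print(f"WARNING: unexpected event {tag} for {instance_uuid}")
        else:
            ev.set()                           # wake the waiting caller
```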
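Both the earlier volume attach ("Reconfiguring VM instance ... to attach disk [datastore1] volume-...") and the detach that follows are a single VirtualMachine.ReconfigVM_Task carrying a device-change spec. The sketch below builds such an attach spec with pyVmomi purely for illustration; the vmwareapi driver itself goes through the oslo.vmware/suds client, and controller_key and unit_number here are assumed inputs.

```python
# Hedged sketch: attaching an existing thin-provisioned VMDK via a
# ReconfigVM_Task device-change spec, using pyVmomi for illustration only.
from pyVmomi import vim

def attach_existing_vmdk(vm, controller_key, unit_number, vmdk_path):
    backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo(
        fileName=vmdk_path,        # e.g. "[datastore1] volume-.../volume-....vmdk"
        diskMode="persistent",
        thinProvisioned=True,
    )
    disk = vim.vm.device.VirtualDisk(
        key=-100,                  # negative key: assigned by vCenter on add
        controllerKey=controller_key,
        unitNumber=unit_number,    # e.g. 1 -> second slot, the log's /dev/sdb
        backing=backing,
    )
    change = vim.vm.device.VirtualDeviceSpec(
        operation=vim.vm.device.VirtualDeviceSpec.Operation.add,
        device=disk,
    )
    spec = vim.vm.ConfigSpec(deviceChange=[change])
    return vm.ReconfigVM_Task(spec=spec)   # poll it like the other tasks in this log
```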
[ 1440.207424] env[69927]: INFO nova.compute.manager [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Detaching volume 79cf1a68-dde3-471c-9057-c0d1153bcb3b [ 1440.242764] env[69927]: INFO nova.virt.block_device [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Attempting to driver detach volume 79cf1a68-dde3-471c-9057-c0d1153bcb3b from mountpoint /dev/sdb [ 1440.243012] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1440.243211] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1440.244091] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed37626-4cb0-49b9-b846-e6c49dea7a0f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.266673] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a417957-4fd3-4f53-8630-b35c43f857a3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.274596] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb0759d-ee6b-4271-862e-4ab418910c43 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.295669] env[69927]: DEBUG nova.network.neutron [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Successfully updated port: 58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1440.297375] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e77f7b-c441-4fb1-837e-6318cb4e1100 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.314080] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 
tempest-ServerActionsTestOtherA-296658517-project-member] The volume has not been displaced from its original location: [datastore1] volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b/volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b.vmdk. No consolidation needed. {{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1440.319253] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfiguring VM instance instance-0000007a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1440.320189] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-483ec5ce-672d-40bf-9de3-ec1f5057b2bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.338804] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1440.338804] env[69927]: value = "task-4097083" [ 1440.338804] env[69927]: _type = "Task" [ 1440.338804] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.347363] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097083, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.801607] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.801776] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1440.801939] env[69927]: DEBUG nova.network.neutron [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.849106] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097083, 'name': ReconfigVM_Task, 'duration_secs': 0.201653} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.849405] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfigured VM instance instance-0000007a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1440.854288] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f034dd6-a183-4f50-83a6-a13f51fd1237 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.870395] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1440.870395] env[69927]: value = "task-4097084" [ 1440.870395] env[69927]: _type = "Task" [ 1440.870395] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.879726] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097084, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.334895] env[69927]: DEBUG nova.network.neutron [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1441.383483] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097084, 'name': ReconfigVM_Task, 'duration_secs': 0.168045} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.383818] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1441.477569] env[69927]: DEBUG nova.network.neutron [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.981061] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1441.981221] env[69927]: DEBUG nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Instance network_info: |[{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1441.981580] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:56:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57691231-2b8d-4d71-8f79-d4a6a1d95ec8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58b7f909-34e0-488c-b45d-808eea3e0be8', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1441.988805] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1441.989040] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1441.989276] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38a18df7-92da-4166-8a67-d76ede34aa60 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.010345] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1442.010345] env[69927]: value = "task-4097085" [ 1442.010345] env[69927]: _type = "Task" [ 1442.010345] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.018323] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097085, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.230745] env[69927]: DEBUG nova.compute.manager [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Received event network-changed-58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1442.230940] env[69927]: DEBUG nova.compute.manager [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Refreshing instance network info cache due to event network-changed-58b7f909-34e0-488c-b45d-808eea3e0be8. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1442.231172] env[69927]: DEBUG oslo_concurrency.lockutils [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] Acquiring lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.231317] env[69927]: DEBUG oslo_concurrency.lockutils [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] Acquired lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1442.231507] env[69927]: DEBUG nova.network.neutron [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Refreshing network info cache for port 58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1442.430519] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1442.431227] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24dfef22-1341-4c27-abd7-9a3daa97e5f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.439226] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1442.439226] env[69927]: value = "task-4097086" [ 1442.439226] env[69927]: _type = "Task" [ 1442.439226] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.449314] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] VM already powered off {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1442.449540] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Volume detach. Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1442.449738] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1442.450509] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35899009-5f70-46fe-894c-1d2cb9bd0e77 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.469849] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc944b55-bcf6-4337-8f85-2115fc09a059 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.476375] env[69927]: WARNING nova.virt.vmwareapi.driver [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1442.476680] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1442.477438] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fd0e8d-8e09-4797-81df-b62c028df9df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.484223] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Unregistering the VM {{(pid=69927) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1442.484469] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfd23667-d628-4b46-ac35-8c400b905f65 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.520525] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097085, 'name': CreateVM_Task, 'duration_secs': 0.309641} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.520763] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1442.521443] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.521618] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1442.521945] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1442.522224] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f47fccf0-2830-4322-a8ab-fa559b739022 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.527488] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1442.527488] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a98ff-9687-1aa9-1d6c-6e02dfb9450b" [ 1442.527488] env[69927]: _type = "Task" [ 1442.527488] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.536211] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a98ff-9687-1aa9-1d6c-6e02dfb9450b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.555312] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1442.555546] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1442.555731] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleting the datastore file [datastore1] 7a1f95d9-892e-492d-acbe-d70b56c36698 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1442.556022] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2e2f21d-1a4d-40a2-8295-00318f24893e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.562825] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1442.562825] env[69927]: value = "task-4097088" [ 1442.562825] env[69927]: _type = "Task" [ 1442.562825] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.571189] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097088, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.928521] env[69927]: DEBUG nova.network.neutron [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updated VIF entry in instance network info cache for port 58b7f909-34e0-488c-b45d-808eea3e0be8. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.928875] env[69927]: DEBUG nova.network.neutron [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.039729] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521a98ff-9687-1aa9-1d6c-6e02dfb9450b, 'name': SearchDatastore_Task, 'duration_secs': 0.010554} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.040018] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1443.040311] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1443.040553] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.040705] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1443.040883] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1443.041169] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2cd7480-19ed-402e-b499-f169b8dc8322 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.049888] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1443.050089] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1443.050831] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6107e5da-a92d-4dd2-b9a9-1d823e819056 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.056571] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1443.056571] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5211bc53-4ce0-6a7d-34a0-12938db2d915" [ 1443.056571] env[69927]: _type = "Task" [ 1443.056571] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.064846] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5211bc53-4ce0-6a7d-34a0-12938db2d915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.072822] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097088, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132143} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.073071] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1443.073262] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1443.073526] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1443.432364] env[69927]: DEBUG oslo_concurrency.lockutils [req-692df519-8af0-46f9-ab50-82fa80340ef2 req-1ad8bf89-d0b7-466e-8849-53784d68bd32 service nova] Releasing lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1443.567499] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5211bc53-4ce0-6a7d-34a0-12938db2d915, 'name': SearchDatastore_Task, 'duration_secs': 0.009428} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.568310] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fd0a89a-567d-4103-8e97-83d71834d0ec {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.574304] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1443.574304] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b327a-091d-9a1f-5443-8b3f8a0eadad" [ 1443.574304] env[69927]: _type = "Task" [ 1443.574304] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.578662] env[69927]: INFO nova.virt.block_device [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Booting with volume 79cf1a68-dde3-471c-9057-c0d1153bcb3b at /dev/sdb [ 1443.586947] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]528b327a-091d-9a1f-5443-8b3f8a0eadad, 'name': SearchDatastore_Task, 'duration_secs': 0.010315} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.589206] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1443.589473] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1443.589929] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9240f10a-7e57-4e1f-a626-e2673fd70f19 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.596619] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1443.596619] env[69927]: value = "task-4097089" [ 1443.596619] env[69927]: _type = "Task" [ 1443.596619] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.605148] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097089, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.613710] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9876bba4-b6ea-4444-b4dd-623f5eabe7da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.622648] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3af8ce-1f23-43ec-9e5a-6e6c8356a68d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.647947] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb795a2e-560e-472d-bd87-8fa03ee08cf1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.657736] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1b9a52-416e-47c8-a57b-93381738e819 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.684296] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88017562-a680-4ce7-a982-fa4087920cc9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.691225] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a1e300-a317-4f01-866c-6cf6e4054710 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.706580] env[69927]: DEBUG nova.virt.block_device [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updating existing volume attachment record: ba18b81a-2494-4e57-b8d4-0eb14eeccc77 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1444.107670] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097089, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462609} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.107942] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1444.108176] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1444.108430] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e08e490-f214-40dc-b77f-8b85fa23c60d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.115639] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1444.115639] env[69927]: value = "task-4097090" [ 1444.115639] env[69927]: _type = "Task" [ 1444.115639] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.125565] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097090, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.625765] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097090, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109802} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.626134] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1444.626824] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a93e478-60d4-41d2-8aea-39d8b67838d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.649094] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1444.649424] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33add740-b0e7-4991-9d1b-51c0a14348ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.671620] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1444.671620] env[69927]: value = "task-4097091" [ 1444.671620] env[69927]: _type = "Task" [ 1444.671620] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.680181] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097091, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.181067] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097091, 'name': ReconfigVM_Task, 'duration_secs': 0.271322} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.181347] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1445.182023] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24431c74-42d2-4855-acc2-3cae0acd15b6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.189072] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1445.189072] env[69927]: value = "task-4097092" [ 1445.189072] env[69927]: _type = "Task" [ 1445.189072] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.197533] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097092, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.699259] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097092, 'name': Rename_Task, 'duration_secs': 0.156695} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.699638] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.699737] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27dfaf26-8c89-429c-a3f8-dca504885327 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.706807] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1445.706807] env[69927]: value = "task-4097093" [ 1445.706807] env[69927]: _type = "Task" [ 1445.706807] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.715830] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097093, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.820140] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1445.820408] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1445.820570] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1445.820760] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1445.820908] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1445.821067] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1445.821287] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1445.821482] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1445.821635] env[69927]: DEBUG 
nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1445.821800] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1445.821972] env[69927]: DEBUG nova.virt.hardware [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1445.822886] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f86758-fab7-4011-b52e-03e75a23604d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.830925] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07731d1-506c-4ac5-843e-bbedc04a4af4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.846176] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:c9:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b4007d2-3eea-417e-b36d-28ced978b73f', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.853457] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1445.853756] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1445.854731] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a451ecc-502d-4589-8594-84ef17ae3e71 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.873246] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.873246] env[69927]: value = "task-4097094" [ 1445.873246] env[69927]: _type = "Task" [ 1445.873246] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.881675] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097094, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.217804] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097093, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.384172] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097094, 'name': CreateVM_Task, 'duration_secs': 0.396704} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.384351] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1446.385060] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.385247] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1446.385584] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1446.385857] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af2acec9-f0fb-4f0e-b2a6-f18e4d24adce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.391492] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1446.391492] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dbaee8-af06-271b-8876-538c7fc9cc97" [ 1446.391492] env[69927]: _type = "Task" [ 1446.391492] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.401022] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dbaee8-af06-271b-8876-538c7fc9cc97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.717816] env[69927]: DEBUG oslo_vmware.api [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097093, 'name': PowerOnVM_Task, 'duration_secs': 0.557225} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.718288] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1446.718288] env[69927]: INFO nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Took 6.69 seconds to spawn the instance on the hypervisor. [ 1446.718443] env[69927]: DEBUG nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1446.719298] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795455e9-9d75-410d-8192-783b500e8a89 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.901761] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52dbaee8-af06-271b-8876-538c7fc9cc97, 'name': SearchDatastore_Task, 'duration_secs': 0.012751} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.902068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1446.902301] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.902574] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.902726] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1446.902902] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.903188] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1784b537-0d60-476c-9620-43ab97fc186f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.912263] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.912467] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1446.913220] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a7ebc5-6f2d-4dbc-a1f3-905c7d6cfeaf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.920093] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1446.920093] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a3296-0568-7b40-7325-8587f262727d" [ 1446.920093] env[69927]: _type = "Task" [ 1446.920093] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.929188] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a3296-0568-7b40-7325-8587f262727d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.237572] env[69927]: INFO nova.compute.manager [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Took 11.38 seconds to build instance. [ 1447.430913] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529a3296-0568-7b40-7325-8587f262727d, 'name': SearchDatastore_Task, 'duration_secs': 0.010839} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.431784] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8309de0a-fb3f-42bd-a813-8d845766ea96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.437568] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1447.437568] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52911c14-e05b-11dc-49c0-7dc617c9ab80" [ 1447.437568] env[69927]: _type = "Task" [ 1447.437568] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.446665] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52911c14-e05b-11dc-49c0-7dc617c9ab80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.502382] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1447.507092] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1447.739900] env[69927]: DEBUG oslo_concurrency.lockutils [None req-45fbd1c6-bf8b-411b-968e-2108c07ead4b tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.887s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.929484] env[69927]: DEBUG nova.compute.manager [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Received event network-changed-58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1447.929798] env[69927]: DEBUG nova.compute.manager [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Refreshing instance network info cache due to event network-changed-58b7f909-34e0-488c-b45d-808eea3e0be8. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1447.930190] env[69927]: DEBUG oslo_concurrency.lockutils [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] Acquiring lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.930462] env[69927]: DEBUG oslo_concurrency.lockutils [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] Acquired lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1447.930606] env[69927]: DEBUG nova.network.neutron [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Refreshing network info cache for port 58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1447.949604] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52911c14-e05b-11dc-49c0-7dc617c9ab80, 'name': SearchDatastore_Task, 'duration_secs': 0.011878} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.949903] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1447.950250] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1447.950551] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49bf79e7-ad2c-4b9d-b649-9048cd9f5243 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.959412] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1447.959412] env[69927]: value = "task-4097095" [ 1447.959412] env[69927]: _type = "Task" [ 1447.959412] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.968936] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.471104] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499977} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.471426] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1448.471782] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1448.472063] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6352dc19-3ac5-45e9-8e14-2daa3ac0b8e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.483941] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1448.483941] env[69927]: value = "task-4097096" [ 1448.483941] env[69927]: _type = "Task" [ 1448.483941] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.493550] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097096, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.507228] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.507470] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.507615] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1448.507765] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.662891] env[69927]: DEBUG nova.network.neutron [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updated VIF entry in instance network info cache for port 58b7f909-34e0-488c-b45d-808eea3e0be8. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1448.663303] env[69927]: DEBUG nova.network.neutron [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.994765] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097096, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079847} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.995209] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1448.996044] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe2865c-8e3b-498f-978c-f1f809dbea0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.011220] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.011476] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1449.011680] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.011840] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1449.020735] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1449.021530] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2536ee-2204-413c-85a8-8935ea005446 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.024723] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8d34a8f-71be-49aa-82cc-41a400d18d3e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.047492] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc18f880-6bc4-4236-a3e1-79278f190cf0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.051933] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d 
tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1449.051933] env[69927]: value = "task-4097097" [ 1449.051933] env[69927]: _type = "Task" [ 1449.051933] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.064198] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6811f820-b129-45da-8c90-bac376294375 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.070869] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097097, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.076540] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001ef521-79db-4fde-ac37-df4478da4db9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.112038] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180211MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1449.112038] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.112038] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1449.165904] env[69927]: DEBUG oslo_concurrency.lockutils [req-a29566d7-134f-4882-8f87-294a97ccb66b req-cadd0468-779c-48ea-aff4-3290a19d2de0 service nova] Releasing lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1449.563515] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097097, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.063132] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097097, 'name': ReconfigVM_Task, 'duration_secs': 0.748158} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.063503] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 7a1f95d9-892e-492d-acbe-d70b56c36698/7a1f95d9-892e-492d-acbe-d70b56c36698.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1450.064585] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'size': 0, 'encrypted': False, 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'guest_format': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'disk_bus': None, 'image_id': 'f524494e-9179-4b3e-a3e2-782f019def24'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'ba18b81a-2494-4e57-b8d4-0eb14eeccc77', 'delete_on_termination': False, 'device_type': None, 'boot_index': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'}, 'mount_device': '/dev/sdb', 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69927) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1450.064791] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Volume attach. 
Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1450.064986] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1450.065785] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf46607-263b-406b-860d-e2b39b86b7d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.081396] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae33f8f5-407c-4936-b7fe-a826086e841b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.105907] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b/volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1450.106203] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cf9dd87-479f-4cda-af4e-db7c414c75fc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.127386] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1450.127386] env[69927]: value = "task-4097098" [ 1450.127386] env[69927]: _type = "Task" [ 1450.127386] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.136452] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097098, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.138446] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1450.138555] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 7a1f95d9-892e-492d-acbe-d70b56c36698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1450.138665] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8a907253-623b-456d-8f04-f99fc48ec58a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1450.138854] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1450.138991] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1450.193316] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e925ae3-8b5a-43b2-a59a-1dab0a7c47be {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.201161] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3477e6a0-9afd-40a2-8328-ce53db0e0684 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.233781] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c28906c-ea66-4662-9299-76f6763fdcb7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.242675] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32b5a2a-e154-4ee4-9e58-6dcd2bc36c4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.256828] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.637629] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097098, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.760065] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1451.137632] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097098, 'name': ReconfigVM_Task, 'duration_secs': 0.742616} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.138021] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b/volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1451.142726] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02a3cb74-38de-4c85-ae2e-c202afb28752 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.159090] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1451.159090] env[69927]: value = "task-4097099" [ 1451.159090] env[69927]: _type = "Task" [ 1451.159090] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.170594] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097099, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.265713] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1451.265923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.156s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1451.669270] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097099, 'name': ReconfigVM_Task, 'duration_secs': 0.152368} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.669560] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1451.670239] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afaf2f60-acba-4daf-8d95-5ac9873da892 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.677059] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1451.677059] env[69927]: value = "task-4097100" [ 1451.677059] env[69927]: _type = "Task" [ 1451.677059] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.685012] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097100, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.188585] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097100, 'name': Rename_Task} progress is 99%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.266358] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.266590] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.687810] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097100, 'name': Rename_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.189488] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097100, 'name': Rename_Task, 'duration_secs': 1.156188} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.189917] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1453.190202] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-caad29f1-85fe-40a6-a45a-4d99ff3d2ec2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.197426] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1453.197426] env[69927]: value = "task-4097101" [ 1453.197426] env[69927]: _type = "Task" [ 1453.197426] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.205245] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097101, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.503316] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.708289] env[69927]: DEBUG oslo_vmware.api [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097101, 'name': PowerOnVM_Task, 'duration_secs': 0.455213} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.709237] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1453.709237] env[69927]: DEBUG nova.compute.manager [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1453.710713] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dad8bc-b43e-489d-a0a0-d707baaa6199 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.228241] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1454.228241] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1454.228241] env[69927]: DEBUG nova.objects.instance [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1455.237048] env[69927]: DEBUG oslo_concurrency.lockutils [None req-89fbc2cf-5da7-47d8-a8d2-bb9eb13b268d tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1486.108139] env[69927]: DEBUG nova.compute.manager [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1486.625995] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1486.626301] 
env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.131824] env[69927]: INFO nova.compute.claims [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1487.638517] env[69927]: INFO nova.compute.resource_tracker [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating resource usage from migration c25f4414-b057-4935-8165-60ca0e0f12e0 [ 1487.704295] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75a187a-d490-400c-81c5-c10dd215a33c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.712911] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff64e8ac-7ffd-4d57-b758-3185abb1da1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.744989] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0c487e-a4b5-4cf6-8107-e13a4796be4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.753637] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998f3cb5-dd8f-4f46-ae51-a14fa55cdf7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.768115] env[69927]: DEBUG nova.compute.provider_tree [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.271342] env[69927]: DEBUG nova.scheduler.client.report [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1488.777714] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.151s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1488.777907] env[69927]: INFO nova.compute.manager [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Migrating [ 1489.292812] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.293213] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1489.293298] env[69927]: DEBUG nova.network.neutron [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1490.008012] env[69927]: DEBUG nova.network.neutron [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.510961] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1490.597596] env[69927]: DEBUG oslo_concurrency.lockutils [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "7a1f95d9-892e-492d-acbe-d70b56c36698" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1490.597866] env[69927]: DEBUG oslo_concurrency.lockutils [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1491.100694] env[69927]: INFO nova.compute.manager [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Detaching volume 79cf1a68-dde3-471c-9057-c0d1153bcb3b [ 1491.131226] env[69927]: INFO nova.virt.block_device [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Attempting to driver detach volume 79cf1a68-dde3-471c-9057-c0d1153bcb3b from mountpoint /dev/sdb [ 1491.131472] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Volume detach.
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1491.131662] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1491.132680] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd891d5-f357-41ff-b6b4-3ca39bd06095 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.155757] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c19a37-9127-4e1d-97ce-18ed22cdc130 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.163305] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ba906a-41b9-499a-928c-15b818889462 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.183415] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cf9761-e09b-432d-9dcf-e2ddcf5f1d18 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.198592] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] The volume has not been displaced from its original location: [datastore1] volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b/volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1491.204092] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfiguring VM instance instance-0000007a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1491.204385] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cf05d59-d102-4576-8c62-15d2fab1e56c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.221658] env[69927]: DEBUG oslo_vmware.api [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1491.221658] env[69927]: value = "task-4097102" [ 1491.221658] env[69927]: _type = "Task" [ 1491.221658] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.229902] env[69927]: DEBUG oslo_vmware.api [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097102, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.731938] env[69927]: DEBUG oslo_vmware.api [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097102, 'name': ReconfigVM_Task, 'duration_secs': 0.24003} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.733055] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Reconfigured VM instance instance-0000007a to detach disk 2001 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1491.737477] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22bb63cd-4fbb-46c5-a3a6-d7895c9f6b6f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.753211] env[69927]: DEBUG oslo_vmware.api [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1491.753211] env[69927]: value = "task-4097103" [ 1491.753211] env[69927]: _type = "Task" [ 1491.753211] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.761215] env[69927]: DEBUG oslo_vmware.api [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097103, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.027093] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8ddfe5-3a77-4a54-8bc3-f4e731f9160e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.045696] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance '8a907253-623b-456d-8f04-f99fc48ec58a' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1492.263865] env[69927]: DEBUG oslo_vmware.api [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097103, 'name': ReconfigVM_Task, 'duration_secs': 0.139872} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.264316] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811622', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'name': 'volume-79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a1f95d9-892e-492d-acbe-d70b56c36698', 'attached_at': '', 'detached_at': '', 'volume_id': '79cf1a68-dde3-471c-9057-c0d1153bcb3b', 'serial': '79cf1a68-dde3-471c-9057-c0d1153bcb3b'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1492.551996] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1492.552303] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03c853f6-c2dc-448d-9cad-66cbfd8999f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.560023] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1492.560023] env[69927]: value = "task-4097104" [ 1492.560023] env[69927]: _type = "Task" [ 1492.560023] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.568380] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097104, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.804123] env[69927]: DEBUG nova.objects.instance [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'flavor' on Instance uuid 7a1f95d9-892e-492d-acbe-d70b56c36698 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.071117] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097104, 'name': PowerOffVM_Task, 'duration_secs': 0.21467} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.071392] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1493.071578] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance '8a907253-623b-456d-8f04-f99fc48ec58a' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1493.578053] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1493.578356] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1493.578356] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1493.578488] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1493.578594] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 
tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1493.578737] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1493.578936] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1493.579104] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1493.579270] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1493.579428] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1493.579594] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1493.584877] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a329e96-701f-4fd2-b8ae-0b5fafadb91e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.601598] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1493.601598] env[69927]: value = "task-4097105" [ 1493.601598] env[69927]: _type = "Task" [ 1493.601598] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.610797] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097105, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.811480] env[69927]: DEBUG oslo_concurrency.lockutils [None req-621cf94e-b6da-4828-850a-5af0183c697f tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.213s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1494.111896] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097105, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.612190] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097105, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.834167] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "7a1f95d9-892e-492d-acbe-d70b56c36698" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1494.834599] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1494.834679] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "7a1f95d9-892e-492d-acbe-d70b56c36698-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1494.834856] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1494.835032] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69927) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1494.837300] env[69927]: INFO nova.compute.manager [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Terminating instance [ 1495.113376] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097105, 'name': ReconfigVM_Task, 'duration_secs': 1.172472} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.113682] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance '8a907253-623b-456d-8f04-f99fc48ec58a' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1495.344717] env[69927]: DEBUG nova.compute.manager [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1495.344717] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1495.344717] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e254cd6-88a6-4e3e-ae1b-451b09873128 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.351928] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1495.352262] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4967f9cb-fd6a-4cb0-9953-05e08a067a96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.359785] env[69927]: DEBUG oslo_vmware.api [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1495.359785] env[69927]: value = "task-4097106" [ 1495.359785] env[69927]: _type = "Task" [ 1495.359785] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.369486] env[69927]: DEBUG oslo_vmware.api [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097106, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.620697] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1495.620937] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1495.621096] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1495.621283] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1495.621433] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1495.621581] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1495.621787] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1495.621970] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1495.622186] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible 
topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1495.622355] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1495.622533] env[69927]: DEBUG nova.virt.hardware [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1495.627800] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1495.628116] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54494980-75c9-4e36-9a68-388ef607100c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.647316] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1495.647316] env[69927]: value = "task-4097107" [ 1495.647316] env[69927]: _type = "Task" [ 1495.647316] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.656953] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097107, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.869788] env[69927]: DEBUG oslo_vmware.api [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097106, 'name': PowerOffVM_Task, 'duration_secs': 0.198392} completed successfully. 
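
The nova.virt.hardware entries above go from unset flavor/image limits (so the 65536 defaults apply) to a single possible topology for one vCPU. A reduced sketch of that enumeration, assuming the rule is simply that sockets * cores * threads must equal the vCPU count within the per-dimension maxima; it reproduces the single VirtCPUTopology(cores=1,sockets=1,threads=1) result logged here, but it is a simplified model rather than the full implementation:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        """Enumerate topologies whose socket*core*thread product equals vcpus."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    # One vCPU -> exactly one topology, as in "Got 1 possible topologies" above.
    print(possible_cpu_topologies(1))
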
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.870233] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1495.870311] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1495.870544] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ff7bfec-6f82-41cc-b154-cb10d5683d1d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.936238] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1495.936604] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1495.936903] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleting the datastore file [datastore2] 7a1f95d9-892e-492d-acbe-d70b56c36698 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1495.937322] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9382959a-7256-4d31-8d5e-0c2311db15e1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.945049] env[69927]: DEBUG oslo_vmware.api [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1495.945049] env[69927]: value = "task-4097109" [ 1495.945049] env[69927]: _type = "Task" [ 1495.945049] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.952958] env[69927]: DEBUG oslo_vmware.api [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097109, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.158232] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097107, 'name': ReconfigVM_Task, 'duration_secs': 0.165799} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.158467] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1496.159628] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c8b7f1-cd67-4e3b-a0d1-599cb11ccf86 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.182186] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1496.182482] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7ebfc00-384e-4faf-b78a-1f423c30f4dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.200211] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1496.200211] env[69927]: value = "task-4097110" [ 1496.200211] env[69927]: _type = "Task" [ 1496.200211] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.208090] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097110, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.455634] env[69927]: DEBUG oslo_vmware.api [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156472} completed successfully. 
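
The two ReconfigVM_Task calls above first remove disk 2000 from instance-0000007c and then re-add the root VMDK with thin provisioning. A schematic of those reconfigure payloads as plain dictionaries; the field names follow the vSphere VirtualDeviceConfigSpec layout, but these are illustrative literals, not the SOAP objects Nova actually builds:

    # Schematic of the two ReconfigVM_Task payloads above, written as plain dicts
    # instead of generated vSphere SOAP objects (illustrative only).

    detach_spec = {
        "deviceChange": [{
            "operation": "remove",        # drop the device from the VM
            "device": {"key": 2000},      # disk 2000, as logged above
            # no fileOperation: the backing VMDK is left in place
        }]
    }

    attach_spec = {
        "deviceChange": [{
            "operation": "add",
            "device": {
                "key": -100,              # negative key: assigned by vCenter
                "controllerKey": 1000,
                "unitNumber": 0,
                "backing": {
                    "fileName": "[datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/"
                                "8a907253-623b-456d-8f04-f99fc48ec58a.vmdk",
                    "diskMode": "persistent",
                    "thinProvisioned": True,   # "with type thin" in the log
                },
            },
        }]
    }
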
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.455894] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1496.456022] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1496.456208] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1496.456384] env[69927]: INFO nova.compute.manager [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1496.456632] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1496.456819] env[69927]: DEBUG nova.compute.manager [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1496.456917] env[69927]: DEBUG nova.network.neutron [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1496.710913] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097110, 'name': ReconfigVM_Task, 'duration_secs': 0.429957} completed successfully. 
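
The teardown of 7a1f95d9-892e-492d-acbe-d70b56c36698 above runs in a fixed order: power off, unregister, delete the instance directory from the datastore, then deallocate the network. A condensed sketch of that ordering; the helper objects are hypothetical stand-ins for the vmops, ds_util and Neutron calls, not the actual interfaces:

    def destroy_instance(vm, datastore, instance_uuid, network_api):
        """Condensed ordering of the teardown logged above (helpers are hypothetical)."""
        vm.power_off()                         # PowerOffVM_Task
        vm.unregister()                        # UnregisterVM (no task object returned)
        datastore.delete_path(instance_uuid)   # DeleteDatastoreFile_Task on the VM folder
        network_api.deallocate_for_instance(instance_uuid)  # ports torn down last
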
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.711185] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1496.711335] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance '8a907253-623b-456d-8f04-f99fc48ec58a' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1496.909308] env[69927]: DEBUG nova.compute.manager [req-88b57d3a-61e6-4298-88cf-2d9faee74931 req-571e789b-d72d-4f27-9579-27eb29710075 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Received event network-vif-deleted-6b4007d2-3eea-417e-b36d-28ced978b73f {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1496.909547] env[69927]: INFO nova.compute.manager [req-88b57d3a-61e6-4298-88cf-2d9faee74931 req-571e789b-d72d-4f27-9579-27eb29710075 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Neutron deleted interface 6b4007d2-3eea-417e-b36d-28ced978b73f; detaching it from the instance and deleting it from the info cache [ 1496.909637] env[69927]: DEBUG nova.network.neutron [req-88b57d3a-61e6-4298-88cf-2d9faee74931 req-571e789b-d72d-4f27-9579-27eb29710075 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.218209] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1cf48a-176c-4138-892f-d5cbcae2d048 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.237213] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c692ede-a0bf-4987-bcb2-1620e245ec81 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.255604] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance '8a907253-623b-456d-8f04-f99fc48ec58a' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1497.395486] env[69927]: DEBUG nova.network.neutron [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.414028] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c51c6cf8-c85c-4d71-86f7-8e162cb21944 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
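
The "progress to 33/50/67" updates around this resize (with 83 and 100 following below) come from a step counter scaled to a percentage. A one-line model that reproduces the logged values, assuming the migration is tracked as six steps; the step total is an assumption chosen to match the numbers, not taken from the source:

    def resize_progress(step, total_steps=6):
        """Percentage reported after completing `step` of `total_steps` resize steps."""
        return round(float(step) / total_steps * 100)

    # Steps 2..6 reproduce the values logged for instance 8a907253: 33, 50, 67, 83, 100.
    print([resize_progress(s) for s in range(2, 7)])
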
1497.424238] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747efa84-d90d-4968-aa4c-3e2240d80b6b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.450660] env[69927]: DEBUG nova.compute.manager [req-88b57d3a-61e6-4298-88cf-2d9faee74931 req-571e789b-d72d-4f27-9579-27eb29710075 service nova] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Detach interface failed, port_id=6b4007d2-3eea-417e-b36d-28ced978b73f, reason: Instance 7a1f95d9-892e-492d-acbe-d70b56c36698 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1497.804749] env[69927]: DEBUG nova.network.neutron [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Port 58b7f909-34e0-488c-b45d-808eea3e0be8 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1497.897953] env[69927]: INFO nova.compute.manager [-] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Took 1.44 seconds to deallocate network for instance. [ 1498.404819] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1498.405193] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1498.405344] env[69927]: DEBUG nova.objects.instance [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'resources' on Instance uuid 7a1f95d9-892e-492d-acbe-d70b56c36698 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1498.824175] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1498.824411] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1498.825107] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 
tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1498.978467] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a92b1a-61cd-4ddc-bbd2-fadc67c5d483 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.986788] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9ed2fe-c2a5-4569-9fe5-191f693db6da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.017452] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4455de1e-c5d7-4c39-a811-0268099aabb5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.025093] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39b1363-4ff1-4803-910d-f37612d139b8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.038014] env[69927]: DEBUG nova.compute.provider_tree [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.541701] env[69927]: DEBUG nova.scheduler.client.report [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1499.864031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.864031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1499.864031] env[69927]: DEBUG nova.network.neutron [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] 
[instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1500.047021] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1500.067844] env[69927]: INFO nova.scheduler.client.report [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted allocations for instance 7a1f95d9-892e-492d-acbe-d70b56c36698 [ 1500.507039] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1500.575235] env[69927]: DEBUG oslo_concurrency.lockutils [None req-4acc3bb6-4ec8-4d4e-b2fd-b58666250d6b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "7a1f95d9-892e-492d-acbe-d70b56c36698" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.741s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1500.592599] env[69927]: DEBUG nova.network.neutron [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.095746] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock 
"refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1501.622501] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71ea7b1-718f-4088-af91-e54d6f927521 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.642353] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e176b0-d050-4a93-b957-b9c73c24f796 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.650585] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance '8a907253-623b-456d-8f04-f99fc48ec58a' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1502.158637] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1502.158637] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40aa2cf7-4451-48ec-9b05-3c9936206049 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.166610] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1502.166610] env[69927]: value = "task-4097111" [ 1502.166610] env[69927]: _type = "Task" [ 1502.166610] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.175541] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097111, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.300966] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "eb05a071-88d4-4e51-936f-d5b7554ac204" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.301240] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1502.677567] env[69927]: DEBUG oslo_vmware.api [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097111, 'name': PowerOnVM_Task, 'duration_secs': 0.412269} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.677875] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1502.678026] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d96f1f39-279b-4b48-9651-9518a805aba5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance '8a907253-623b-456d-8f04-f99fc48ec58a' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1502.803468] env[69927]: DEBUG nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Starting instance... 
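
Every Acquiring/acquired/released triplet in this log is a named oslo.concurrency lock keyed by the resource it protects: an instance UUID for terminate or build, "compute_resources" for the resource tracker, "refresh_cache-<uuid>" for the network info cache. A minimal sketch of that usage, assuming oslo.concurrency is installed; the decorated function bodies are placeholders:

    from oslo_concurrency import lockutils

    # Lock names mirror the messages above: one per instance UUID, one for the
    # resource tracker, one per refresh_cache-<uuid>.

    @lockutils.synchronized("compute_resources")
    def update_usage(tracker, instance):
        """Serialized against other resource-tracker updates, like update_usage above."""
        tracker.apply(instance)

    def refresh_instance_cache(instance_uuid, refresh_fn):
        """Equivalent of the Acquiring/Acquired/Releasing refresh_cache-<uuid> triplet."""
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            return refresh_fn(instance_uuid)
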
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1503.326866] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1503.327187] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1503.329281] env[69927]: INFO nova.compute.claims [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1503.509453] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1504.397318] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1fc9ba-7bc3-4614-ac34-5f2831c89ad5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.404754] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac58dd9d-b884-4f27-bae8-4460a3b54fe6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.434484] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdcf26d-2ee2-4583-ad01-4bca05c094f4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.441868] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0e2538-7c20-4420-b674-1277cb434243 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.456313] env[69927]: DEBUG nova.compute.provider_tree [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.588807] env[69927]: DEBUG nova.network.neutron [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Port 58b7f909-34e0-488c-b45d-808eea3e0be8 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1504.589087] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.589248] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1504.589412] env[69927]: DEBUG nova.network.neutron [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1504.960306] env[69927]: DEBUG nova.scheduler.client.report [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1505.304163] env[69927]: DEBUG nova.network.neutron [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.465632] env[69927]: DEBUG oslo_concurrency.lockutils [None 
req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1505.466128] env[69927]: DEBUG nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1505.806856] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1505.971172] env[69927]: DEBUG nova.compute.utils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1505.972612] env[69927]: DEBUG nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Allocating IP information in the background. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1505.972854] env[69927]: DEBUG nova.network.neutron [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1506.008332] env[69927]: DEBUG nova.policy [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef08164611894c289d4c30194d91526a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823381b9f644adf818b490c551f5a3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1506.258531] env[69927]: DEBUG nova.network.neutron [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Successfully created port: 961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1506.309941] env[69927]: DEBUG nova.compute.manager [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] 
Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69927) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:900}} [ 1506.310193] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1506.310451] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1506.475822] env[69927]: DEBUG nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1506.814443] env[69927]: DEBUG nova.objects.instance [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'migration_context' on Instance uuid 8a907253-623b-456d-8f04-f99fc48ec58a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1507.382819] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cfc27f-7de3-4867-8b68-1a6ee3b02948 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.390684] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7a1b5a-22b2-42f2-9c13-5e574ab5f20b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.421666] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c37255-9002-4dc0-a0ac-da4cc0c7b869 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.429328] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6536c515-96d9-4642-9d3f-cf9bbc0bd68b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.442514] env[69927]: DEBUG nova.compute.provider_tree [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.485857] env[69927]: DEBUG nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Start spawning the instance on the 
hypervisor. {{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1507.512041] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1507.512325] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1507.512487] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1507.512730] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1507.512862] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1507.513039] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1507.513329] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1507.513578] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1507.513811] env[69927]: DEBUG 
nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1507.514020] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1507.514233] env[69927]: DEBUG nova.virt.hardware [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1507.515241] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebfe57e-aaa7-4ded-92b3-cbd742b108da {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.523977] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5c88a1-7505-4e3d-8ee5-885182559035 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.614851] env[69927]: DEBUG nova.compute.manager [req-1a57dea0-bd18-46ed-b605-6188d84e642a req-3d5374c3-b974-41a3-97ae-aea28186b380 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Received event network-vif-plugged-961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1507.614851] env[69927]: DEBUG oslo_concurrency.lockutils [req-1a57dea0-bd18-46ed-b605-6188d84e642a req-3d5374c3-b974-41a3-97ae-aea28186b380 service nova] Acquiring lock "eb05a071-88d4-4e51-936f-d5b7554ac204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.614851] env[69927]: DEBUG oslo_concurrency.lockutils [req-1a57dea0-bd18-46ed-b605-6188d84e642a req-3d5374c3-b974-41a3-97ae-aea28186b380 service nova] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.614851] env[69927]: DEBUG oslo_concurrency.lockutils [req-1a57dea0-bd18-46ed-b605-6188d84e642a req-3d5374c3-b974-41a3-97ae-aea28186b380 service nova] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.614851] env[69927]: DEBUG nova.compute.manager [req-1a57dea0-bd18-46ed-b605-6188d84e642a req-3d5374c3-b974-41a3-97ae-aea28186b380 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] No waiting events found dispatching network-vif-plugged-961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1507.614851] env[69927]: WARNING nova.compute.manager 
[req-1a57dea0-bd18-46ed-b605-6188d84e642a req-3d5374c3-b974-41a3-97ae-aea28186b380 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Received unexpected event network-vif-plugged-961d8267-5b82-4297-96a8-4806c2f9d8a5 for instance with vm_state building and task_state spawning. [ 1507.912437] env[69927]: DEBUG nova.network.neutron [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Successfully updated port: 961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1507.946077] env[69927]: DEBUG nova.scheduler.client.report [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1508.415739] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.415919] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1508.416078] env[69927]: DEBUG nova.network.neutron [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1508.948864] env[69927]: DEBUG nova.network.neutron [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Instance cache missing network info. 
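
The "Inventory has not changed" lines are the scheduler report client short-circuiting: the freshly computed provider inventory is compared against the copy last sent to Placement, and the update is skipped when the two match. A small sketch of that check using the inventory logged above (the function name is illustrative):

    def inventory_changed(cached, computed):
        """Return True only if the provider inventory actually needs a Placement update."""
        return cached != computed

    cached = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                      "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 17,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    # The compute node reports the same numbers, so no call to Placement is made.
    print(inventory_changed(cached, dict(cached)))   # False
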
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1508.956905] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.646s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1509.010177] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.010327] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1509.080156] env[69927]: DEBUG nova.network.neutron [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Updating instance_info_cache with network_info: [{"id": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "address": "fa:16:3e:db:2b:01", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap961d8267-5b", "ovs_interfaceid": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.503194] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.506799] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.506992] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1509.507160] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.507315] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.582829] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1509.583190] env[69927]: DEBUG nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Instance network_info: |[{"id": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "address": "fa:16:3e:db:2b:01", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap961d8267-5b", "ovs_interfaceid": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1509.583966] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:2b:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '961d8267-5b82-4297-96a8-4806c2f9d8a5', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1509.591402] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1509.591616] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1509.591838] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-822c975d-aa3a-4253-a149-dc762423e77e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.612939] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1509.612939] env[69927]: value = "task-4097112" [ 1509.612939] env[69927]: _type = "Task" [ 1509.612939] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.621194] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097112, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.642898] env[69927]: DEBUG nova.compute.manager [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Received event network-changed-961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1509.643073] env[69927]: DEBUG nova.compute.manager [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Refreshing instance network info cache due to event network-changed-961d8267-5b82-4297-96a8-4806c2f9d8a5. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1509.643307] env[69927]: DEBUG oslo_concurrency.lockutils [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] Acquiring lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.643475] env[69927]: DEBUG oslo_concurrency.lockutils [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] Acquired lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1509.643656] env[69927]: DEBUG nova.network.neutron [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Refreshing network info cache for port 961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.010836] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1510.011196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1510.011289] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1510.011461] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1510.012423] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba7bfae-5a22-4031-8d2b-81962f94b548 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.020612] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37c4e4a-4e2d-48bf-9296-c2dddb40691d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.035636] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec293dd4-0f3c-48da-acd8-6f0430636439 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.041961] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee1ce4c-9c6f-4739-a960-26222bf82978 {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.072554] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180612MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1510.072764] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1510.072913] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1510.122260] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097112, 'name': CreateVM_Task, 'duration_secs': 0.342037} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.122432] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1510.123133] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.123322] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1510.123661] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1510.123908] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c5e36b-c63c-4fe9-8451-2bb5ab009216 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.128547] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1510.128547] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521edf22-0fb0-4ba3-4dff-899cb9e976d9" [ 1510.128547] env[69927]: _type = "Task" [ 1510.128547] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.137087] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521edf22-0fb0-4ba3-4dff-899cb9e976d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.326504] env[69927]: DEBUG nova.network.neutron [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Updated VIF entry in instance network info cache for port 961d8267-5b82-4297-96a8-4806c2f9d8a5. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1510.326869] env[69927]: DEBUG nova.network.neutron [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Updating instance_info_cache with network_info: [{"id": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "address": "fa:16:3e:db:2b:01", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap961d8267-5b", "ovs_interfaceid": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.561666] env[69927]: INFO nova.compute.manager [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Swapping old allocation on dict_keys(['2f529b36-df5f-4b37-8103-68f74f737726']) held by migration c25f4414-b057-4935-8165-60ca0e0f12e0 for instance [ 1510.590141] env[69927]: DEBUG nova.scheduler.client.report [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Overwriting current allocation {'allocations': {'2f529b36-df5f-4b37-8103-68f74f737726': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 184}}, 'project_id': 'dee421c661394f3fbf8d69a575f095a9', 'user_id': '2d854e5435164764b5b69b9c7262398f', 'consumer_generation': 1} on consumer 8a907253-623b-456d-8f04-f99fc48ec58a {{(pid=69927) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1510.642915] env[69927]: DEBUG oslo_vmware.api [None 
req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]521edf22-0fb0-4ba3-4dff-899cb9e976d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010728} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.643826] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1510.643826] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1510.644064] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.644251] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1510.644490] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1510.644785] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-449b4593-0428-4cd3-8b81-23eeb0722f94 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.654518] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1510.654834] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1510.655665] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1ebeaf9-7047-47c6-8e91-d1a3932659db {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.662673] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1510.662673] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522fa46f-3741-1dc3-c719-e66ce723eb58" [ 1510.662673] env[69927]: _type = "Task" [ 1510.662673] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.670947] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522fa46f-3741-1dc3-c719-e66ce723eb58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.671896] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.672078] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1510.672283] env[69927]: DEBUG nova.network.neutron [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1510.829942] env[69927]: DEBUG oslo_concurrency.lockutils [req-24429a90-4492-43da-abea-9509d5c0da6e req-025ab37d-5340-46d4-965b-99df60c7b469 service nova] Releasing lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1511.095138] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1511.095469] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance eb05a071-88d4-4e51-936f-d5b7554ac204 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1511.095469] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 8a907253-623b-456d-8f04-f99fc48ec58a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1511.095576] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1511.095716] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1511.143368] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdaee209-1b6a-4976-849e-8013bc9c2852 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.150889] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89cc3309-e3dc-4509-841f-dd821718f43a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.189498] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95f5634-0dd4-4beb-916c-79cd5fba519f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.199010] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]522fa46f-3741-1dc3-c719-e66ce723eb58, 'name': SearchDatastore_Task, 'duration_secs': 0.009853} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.201785] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-171ccf59-8b02-444b-a001-f47ed5eba093 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.205044] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac34df4-a4a3-4b2b-bc63-1acfa5cea7d0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.212530] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1511.212530] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529ec1c2-38b0-92c6-0fcd-16559ede4242" [ 1511.212530] env[69927]: _type = "Task" [ 1511.212530] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.220227] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.232972] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529ec1c2-38b0-92c6-0fcd-16559ede4242, 'name': SearchDatastore_Task, 'duration_secs': 0.010442} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.233343] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1511.233656] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] eb05a071-88d4-4e51-936f-d5b7554ac204/eb05a071-88d4-4e51-936f-d5b7554ac204.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1511.233921] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-415ebbcb-0c77-4fd9-8320-9dfb0741748e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.240774] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1511.240774] env[69927]: value = "task-4097113" [ 1511.240774] env[69927]: _type = "Task" [ 1511.240774] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.249508] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097113, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.422931] env[69927]: DEBUG nova.network.neutron [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [{"id": "58b7f909-34e0-488c-b45d-808eea3e0be8", "address": "fa:16:3e:d6:56:16", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58b7f909-34", "ovs_interfaceid": "58b7f909-34e0-488c-b45d-808eea3e0be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.727617] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1511.751175] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097113, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453693} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.751440] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore1] eb05a071-88d4-4e51-936f-d5b7554ac204/eb05a071-88d4-4e51-936f-d5b7554ac204.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1511.751657] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1511.751904] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26d7a3c9-1935-44cd-9a28-78d2c1050062 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.759934] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1511.759934] env[69927]: value = "task-4097114" [ 1511.759934] env[69927]: _type = "Task" [ 1511.759934] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.768800] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097114, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.926302] env[69927]: DEBUG oslo_concurrency.lockutils [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-8a907253-623b-456d-8f04-f99fc48ec58a" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1511.926847] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1511.927258] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9333b4cd-eed7-4313-8e4b-fba08342afe7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.935031] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1511.935031] env[69927]: value = "task-4097115" [ 1511.935031] env[69927]: _type = "Task" [ 1511.935031] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.946802] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.232862] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1512.233453] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.160s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1512.269735] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097114, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06172} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.269991] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1512.270746] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5305ad-7f08-403e-ad28-a4e0e2b99c09 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.292870] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] eb05a071-88d4-4e51-936f-d5b7554ac204/eb05a071-88d4-4e51-936f-d5b7554ac204.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1512.293183] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dbdfe15-6a52-403e-979b-50d140f9f327 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.312637] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1512.312637] env[69927]: value = "task-4097116" [ 1512.312637] env[69927]: _type = "Task" [ 1512.312637] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.320838] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097116, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.445718] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097115, 'name': PowerOffVM_Task, 'duration_secs': 0.248925} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.445972] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1512.446655] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1512.446874] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1512.447043] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1512.447236] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1512.447387] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1512.447536] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1512.447769] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1512.447933] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1512.448115] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1512.448314] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1512.448583] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1512.453596] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-871cf51d-1650-421e-9595-d8260ab0634d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.469099] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1512.469099] env[69927]: value = "task-4097117" [ 1512.469099] env[69927]: _type = "Task" [ 1512.469099] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.477239] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097117, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.826152] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097116, 'name': ReconfigVM_Task, 'duration_secs': 0.495554} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.826555] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Reconfigured VM instance instance-0000007d to attach disk [datastore1] eb05a071-88d4-4e51-936f-d5b7554ac204/eb05a071-88d4-4e51-936f-d5b7554ac204.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1512.827432] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a199171d-c6da-4925-954a-e251448bc7d3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.835631] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1512.835631] env[69927]: value = "task-4097118" [ 1512.835631] env[69927]: _type = "Task" [ 1512.835631] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.847877] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097118, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.979499] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097117, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.233508] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.346444] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097118, 'name': Rename_Task} progress is 14%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.480746] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097117, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.846701] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097118, 'name': Rename_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.981079] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097117, 'name': ReconfigVM_Task, 'duration_secs': 1.168881} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.981643] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa946f74-bec2-4598-a570-7e3650f313d9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.999581] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1513.999814] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.999969] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1514.000188] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.000345] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1514.000493] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1514.000696] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1514.000853] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1514.001030] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1514.001197] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1514.001367] env[69927]: DEBUG nova.virt.hardware [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1514.002135] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d22cdaab-f6f9-467c-a908-867523024b91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.008657] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1514.008657] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52db97fa-2ad4-3908-ed64-e38d4b370f2b" [ 1514.008657] env[69927]: _type = "Task" [ 1514.008657] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.018184] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52db97fa-2ad4-3908-ed64-e38d4b370f2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.347060] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097118, 'name': Rename_Task, 'duration_secs': 1.057729} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.347485] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1514.347593] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1c130f2-e128-46dc-b1a7-c5b164b2569c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.353755] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1514.353755] env[69927]: value = "task-4097119" [ 1514.353755] env[69927]: _type = "Task" [ 1514.353755] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.361387] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.520021] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52db97fa-2ad4-3908-ed64-e38d4b370f2b, 'name': SearchDatastore_Task, 'duration_secs': 0.008859} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.525826] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1514.526171] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a36c62e3-5d3f-481c-b66f-9e0b3398a707 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.545792] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1514.545792] env[69927]: value = "task-4097120" [ 1514.545792] env[69927]: _type = "Task" [ 1514.545792] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.554598] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097120, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.863584] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097119, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.056412] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097120, 'name': ReconfigVM_Task, 'duration_secs': 0.17766} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.056712] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1515.057493] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd65900-41e8-4c06-9109-7ee77ffaf5bd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.079783] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1515.080115] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d57d9fdc-55d6-42c7-be5f-0eee09ff0b4a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.098682] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1515.098682] env[69927]: value = "task-4097121" [ 1515.098682] env[69927]: _type = "Task" [ 1515.098682] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.107209] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097121, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.366414] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097119, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.609307] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097121, 'name': ReconfigVM_Task, 'duration_secs': 0.278952} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.609588] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a/8a907253-623b-456d-8f04-f99fc48ec58a.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1515.610466] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7308c758-92c5-40b2-977c-c996078570ea {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.628751] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0522f7a5-b829-41bb-beac-519d544f66ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.648485] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7206afab-098b-4eb6-b1e0-fde7dae29e4e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.667100] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f570b9f2-4329-45f4-89a5-7de23e0c27fe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.674908] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1515.675219] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7fb4df3-1025-4d38-9511-2ba2488d3a7a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.683160] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1515.683160] env[69927]: value = "task-4097122" [ 1515.683160] env[69927]: _type = "Task" [ 1515.683160] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.691877] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097122, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.865909] env[69927]: DEBUG oslo_vmware.api [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097119, 'name': PowerOnVM_Task, 'duration_secs': 1.052164} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.866236] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1515.866476] env[69927]: INFO nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Took 8.38 seconds to spawn the instance on the hypervisor. [ 1515.866771] env[69927]: DEBUG nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1515.867524] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb10ed2-65f2-488b-bc78-33d508b6b08a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.193755] env[69927]: DEBUG oslo_vmware.api [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097122, 'name': PowerOnVM_Task, 'duration_secs': 0.401989} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.194039] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1516.389967] env[69927]: INFO nova.compute.manager [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Took 13.08 seconds to build instance. 
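The task entries above follow the oslo.vmware pattern of submitting a vCenter task and polling it until completion, logging progress (0% -> 66%) and the final duration_secs. The following is a stdlib-only sketch of such a wait loop; get_task_info() and the dict it returns are hypothetical stand-ins, not the oslo.vmware API.

    # Stdlib sketch of the "Waiting for the task ... progress is N%" loop above.
    # get_task_info() and its return dict are hypothetical stand-ins.
    import time

    def wait_for_task(task_id, get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds; return the elapsed time."""
        started = time.monotonic()
        while True:
            info = get_task_info(task_id)
            if info["state"] == "success":
                duration = time.monotonic() - started
                print(f"Task {task_id} completed successfully in {duration:.3f}s")
                return duration
            if info["state"] == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            if time.monotonic() - started > timeout:
                raise TimeoutError(f"Task {task_id} did not finish within {timeout}s")
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)

    # Fake task that reports 0%, then 66%, then success -- like task-4097119 above.
    _polls = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 66},
                   {"state": "success"}])
    wait_for_task("task-demo", lambda _tid: next(_polls), poll_interval=0.0)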
[ 1516.892539] env[69927]: DEBUG oslo_concurrency.lockutils [None req-479a08e8-b9ca-4aa9-9947-d67a233c764b tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.591s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1517.205303] env[69927]: INFO nova.compute.manager [None req-f66b888d-0f18-43a9-afd4-275b24b0fd12 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance to original state: 'active' [ 1517.375612] env[69927]: DEBUG nova.compute.manager [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Received event network-changed-961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1517.375833] env[69927]: DEBUG nova.compute.manager [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Refreshing instance network info cache due to event network-changed-961d8267-5b82-4297-96a8-4806c2f9d8a5. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1517.376082] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] Acquiring lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.376259] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] Acquired lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1517.376475] env[69927]: DEBUG nova.network.neutron [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Refreshing network info cache for port 961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1518.084400] env[69927]: DEBUG nova.network.neutron [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Updated VIF entry in instance network info cache for port 961d8267-5b82-4297-96a8-4806c2f9d8a5. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1518.084764] env[69927]: DEBUG nova.network.neutron [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Updating instance_info_cache with network_info: [{"id": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "address": "fa:16:3e:db:2b:01", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap961d8267-5b", "ovs_interfaceid": "961d8267-5b82-4297-96a8-4806c2f9d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.168427] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "8a907253-623b-456d-8f04-f99fc48ec58a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1518.168719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1518.169046] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1518.169300] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1518.169532] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a 
tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1518.172336] env[69927]: INFO nova.compute.manager [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Terminating instance [ 1518.588093] env[69927]: DEBUG oslo_concurrency.lockutils [req-3ead21e0-cbe2-47e0-9270-97686b1c62ca req-a3d56770-15b4-49b5-8f09-c6f4c1cd4528 service nova] Releasing lock "refresh_cache-eb05a071-88d4-4e51-936f-d5b7554ac204" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1518.676351] env[69927]: DEBUG nova.compute.manager [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1518.676619] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1518.677542] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277aa7ad-0608-4a43-b957-bb367fee81f5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.686437] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1518.686744] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ae9cefe-e5d4-45e1-9439-e5984b979276 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.693975] env[69927]: DEBUG oslo_vmware.api [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1518.693975] env[69927]: value = "task-4097124" [ 1518.693975] env[69927]: _type = "Task" [ 1518.693975] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.702723] env[69927]: DEBUG oslo_vmware.api [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097124, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.203818] env[69927]: DEBUG oslo_vmware.api [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097124, 'name': PowerOffVM_Task, 'duration_secs': 0.234745} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.204217] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1519.204324] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1519.204572] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2db18b0-6bcd-4f9f-b942-d85b52803cf5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.508089] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1519.508289] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Cleaning up deleted instances with incomplete migration {{(pid=69927) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1521.011710] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1521.012123] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Cleaning up deleted instances {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1521.520721] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] There are 27 instances to clean {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1521.520933] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: f4512f10-36bf-4277-acb7-e09a2a0d2a37] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1522.024489] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 7a1f95d9-892e-492d-acbe-d70b56c36698] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1522.527756] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 95c47fc8-fed0-4b55-8f65-61b46861e51d] Instance has had 0 of 5 cleanup 
attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1523.031674] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 9e9e93cc-e225-4ec7-850f-916aa078ba30] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1523.087886] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1523.088134] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1523.088134] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleting the datastore file [datastore2] 8a907253-623b-456d-8f04-f99fc48ec58a {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1523.088446] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1376754-d876-4bad-be7d-7acfda1b122d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.096864] env[69927]: DEBUG oslo_vmware.api [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1523.096864] env[69927]: value = "task-4097127" [ 1523.096864] env[69927]: _type = "Task" [ 1523.096864] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.106323] env[69927]: DEBUG oslo_vmware.api [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.535072] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 6bfafa83-a9e2-4f7d-bbad-6b356f173b68] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1523.606624] env[69927]: DEBUG oslo_vmware.api [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097127, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.038800] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: eb84d52d-7153-412b-9ed9-4b7986cdfbbf] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1524.107254] env[69927]: DEBUG oslo_vmware.api [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.578815} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.107600] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1524.107721] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1524.107899] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1524.108084] env[69927]: INFO nova.compute.manager [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Took 5.43 seconds to destroy the instance on the hypervisor. [ 1524.108351] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
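The loopingcall entry above ("Waiting for function ..._deallocate_network_with_retries to return") points at a retry wrapper: network deallocation is re-attempted on transient failures before giving up. A hedged, stdlib-only sketch of that general pattern follows; the retry policy, exception type and the deallocate_network() stub are assumptions, not Nova's code.

    # Generic retry wrapper in the spirit of _deallocate_network_with_retries.
    # The retry policy, exception type and stub below are assumptions.
    import time

    def call_with_retries(func, attempts=3, delay=1.0, retry_on=(ConnectionError,)):
        """Call func(); on a listed exception, wait `delay` seconds and try again."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except retry_on as exc:
                if attempt == attempts:
                    raise
                print(f"Attempt {attempt} failed ({exc}); retrying in {delay}s")
                time.sleep(delay)

    # Stub that fails once, then succeeds -- stands in for a Neutron call.
    state = {"calls": 0}
    def deallocate_network():
        state["calls"] += 1
        if state["calls"] < 2:
            raise ConnectionError("neutron briefly unreachable")
        return "deallocated"

    print(call_with_retries(deallocate_network, delay=0.0))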
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1524.108543] env[69927]: DEBUG nova.compute.manager [-] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1524.108639] env[69927]: DEBUG nova.network.neutron [-] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1524.543980] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 923b70fc-9959-48cf-8a9f-f8cd7c0c6b19] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1524.575690] env[69927]: DEBUG nova.compute.manager [req-95f36e45-3498-499a-85a9-a27adfaa5faa req-25953871-b698-4064-84ff-3dbf267e6f49 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Received event network-vif-deleted-58b7f909-34e0-488c-b45d-808eea3e0be8 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1524.575690] env[69927]: INFO nova.compute.manager [req-95f36e45-3498-499a-85a9-a27adfaa5faa req-25953871-b698-4064-84ff-3dbf267e6f49 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Neutron deleted interface 58b7f909-34e0-488c-b45d-808eea3e0be8; detaching it from the instance and deleting it from the info cache [ 1524.575690] env[69927]: DEBUG nova.network.neutron [req-95f36e45-3498-499a-85a9-a27adfaa5faa req-25953871-b698-4064-84ff-3dbf267e6f49 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.047686] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8a75b5f9-a54a-43ff-a9a4-ad9a3461eb11] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1525.050308] env[69927]: DEBUG nova.network.neutron [-] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.079298] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a473f93-460a-477a-995c-7ce47f9014f2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.089689] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583c8cae-b88c-4d3e-905f-7a25752ba512 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.116076] env[69927]: DEBUG nova.compute.manager [req-95f36e45-3498-499a-85a9-a27adfaa5faa req-25953871-b698-4064-84ff-3dbf267e6f49 service nova] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Detach interface failed, port_id=58b7f909-34e0-488c-b45d-808eea3e0be8, reason: Instance 8a907253-623b-456d-8f04-f99fc48ec58a could not be found. 
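The req-95f36e45 entries below and above show Neutron reporting a deleted interface for an instance that is itself mid-teardown: the cached network_info is emptied, and the failed detach is logged ("could not be found") rather than raised. Here is a small sketch of that tolerant handler; the cache layout, the InstanceNotFound class and the detach stub are illustrative stand-ins, not Nova's objects.

    # Sketch of handling a network-vif-deleted event for an instance that may
    # already be gone; the types and dicts below are stand-ins.
    class InstanceNotFound(Exception):
        pass

    def handle_vif_deleted(instance_uuid, port_id, info_cache, detach_interface):
        """Drop the port from the cached network_info, then best-effort detach."""
        cached = info_cache.get(instance_uuid, [])
        info_cache[instance_uuid] = [vif for vif in cached if vif["id"] != port_id]
        try:
            detach_interface(instance_uuid, port_id)
        except InstanceNotFound:
            # The instance is being torn down concurrently; nothing left to detach.
            print(f"Detach interface failed, port_id={port_id}, "
                  f"reason: Instance {instance_uuid} could not be found.")

    cache = {"instance-a": [{"id": "port-1"}, {"id": "port-2"}]}

    def detach(instance_uuid, port_id):
        raise InstanceNotFound(instance_uuid)

    handle_vif_deleted("instance-a", "port-1", cache, detach)
    print(cache["instance-a"])   # [{'id': 'port-2'}]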
{{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1525.552770] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 853d85d4-e98f-4810-a8db-b2a820ebc071] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1525.554877] env[69927]: INFO nova.compute.manager [-] [instance: 8a907253-623b-456d-8f04-f99fc48ec58a] Took 1.45 seconds to deallocate network for instance. [ 1526.062489] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1526.062875] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1526.063024] env[69927]: DEBUG nova.objects.instance [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'resources' on Instance uuid 8a907253-623b-456d-8f04-f99fc48ec58a {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1526.064968] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 659e2584-88a8-4382-98c8-f50fcab78e0c] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1526.570379] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 80c5c2b3-19bb-4202-bee5-1bdb9f8ed14d] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1526.626981] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85aa2b8b-c10f-4e9a-a3fa-05dadfbf4717 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.635593] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f1cf0e-c1a6-49f1-b10c-c1ab7fbce6fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.666648] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae85db53-79d2-466b-a0b7-012bae62fa6f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.674577] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e1578a-58e2-4a03-af00-68e99fbf3b7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.688254] env[69927]: DEBUG nova.compute.provider_tree [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 
tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.074426] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: afdd23d0-c8e0-4d49-a188-525b6b3f31c8] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1527.192061] env[69927]: DEBUG nova.scheduler.client.report [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1527.578743] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: e38222c4-3362-4d47-aee4-d26ccb4cbf3c] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1527.696774] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.634s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1527.721286] env[69927]: INFO nova.scheduler.client.report [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted allocations for instance 8a907253-623b-456d-8f04-f99fc48ec58a [ 1528.082708] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 2a0a6870-47ad-4958-afed-bdbda3e54c21] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1528.230096] env[69927]: DEBUG oslo_concurrency.lockutils [None req-17e2839a-158e-443b-8edb-f789e5bfd02a tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "8a907253-623b-456d-8f04-f99fc48ec58a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.061s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1528.587016] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 07814f60-1886-4b06-bcf7-e2c9b95a4501] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1529.091117] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: b007a697-7da4-4c97-9ccb-046d86b27568] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1529.595166] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 8b70b479-4a54-4bcb-813d-16cc0c9a67c5] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1529.673463] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1529.673657] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1530.098758] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 56aec5c2-d344-4a8d-a55a-930bc425150a] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1530.176090] env[69927]: DEBUG nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Starting instance... 
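The lockutils entries around _locked_do_build_and_run_instance show a lock named after the instance UUID, with the time spent waiting for and holding it reported on acquire and release. The stdlib sketch below mimics only that logging shape; oslo.concurrency provides the real implementation.

    # Stdlib sketch of a named lock that reports waited/held times in the style
    # of the oslo_concurrency.lockutils messages above; not the oslo code.
    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)

    @contextmanager
    def timed_lock(name, owner):
        lock = _locks[name]
        wait_start = time.monotonic()
        with lock:
            waited = time.monotonic() - wait_start
            print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
            held_start = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_start
                print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')

    with timed_lock("b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62",
                    "_locked_do_build_and_run_instance"):
        time.sleep(0.01)   # stand-in for the build work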
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1530.601968] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 5c87c74d-5998-4dfc-bc3c-c2887ff25195] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1530.699404] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1530.699678] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1530.701252] env[69927]: INFO nova.compute.claims [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1531.105303] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: da468d11-82a4-4fec-b06a-1b522bacdbc2] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1531.608871] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: d548ea75-9c1f-4884-b338-194f1b5d62ef] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1531.759618] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee18c1b-ca79-455a-9496-6f5456afc9f9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.767984] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c203660e-5875-4080-9a0a-33b460db0195 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.798025] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17684a6e-78b4-4638-b4f5-9f12344a4eac {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.805658] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae94eb8d-3566-4785-b001-bdb925b058ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.818983] env[69927]: DEBUG nova.compute.provider_tree [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.113549] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 7ff17f1d-31fd-440b-906c-2719770a9151] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1532.322283] env[69927]: DEBUG nova.scheduler.client.report [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1532.617328] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 693a6c6b-8d1c-405e-bb17-73259e28f556] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1532.828014] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.128s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1532.828705] env[69927]: DEBUG nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1533.122054] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: cff307ed-3c8b-4126-9749-1204597cbf6c] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1533.335728] env[69927]: DEBUG nova.compute.utils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1533.337223] env[69927]: DEBUG nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Allocating IP information in the background. 
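The provider_tree and scheduler.client.report entries above compare the locally computed inventory (VCPU, MEMORY_MB, DISK_GB with their allocation ratios) against the cached copy and skip the placement update when nothing changed. A minimal sketch of that comparison follows; the values mirror the log, and push_to_placement() is a hypothetical stand-in for the placement call.

    # Sketch of the "Inventory has not changed for provider ..." decision: only
    # push an update when the computed inventory differs from the cached copy.
    def sync_inventory(provider_id, cached, computed, push_to_placement):
        """push_to_placement is a stand-in for the real placement API call."""
        if cached == computed:
            print(f"Inventory has not changed for provider {provider_id}")
            return False
        push_to_placement(provider_id, computed)
        return True

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    sync_inventory("2f529b36-df5f-4b37-8103-68f74f737726",
                   cached=inventory, computed=dict(inventory),
                   push_to_placement=lambda provider, inv: None)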
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1533.337397] env[69927]: DEBUG nova.network.neutron [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1533.384892] env[69927]: DEBUG nova.policy [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d854e5435164764b5b69b9c7262398f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dee421c661394f3fbf8d69a575f095a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1533.627257] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 15c44d86-829f-4317-ab66-9e61d4fb4dd0] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1533.670279] env[69927]: DEBUG nova.network.neutron [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Successfully created port: 4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1533.843463] env[69927]: DEBUG nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1534.130100] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: b7ee5c8b-41fb-4d1d-935e-8acc7ba8e15a] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1534.633693] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: cdb6a451-cf98-47d4-8cfe-7e1fefd6ca1a] Instance has had 0 of 5 cleanup attempts {{(pid=69927) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1534.851568] env[69927]: DEBUG nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1534.879333] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1534.879609] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1534.879787] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1534.879970] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1534.880136] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1534.880284] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1534.880495] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1534.880651] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1534.880819] env[69927]: DEBUG nova.virt.hardware [None 
req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1534.880979] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1534.881172] env[69927]: DEBUG nova.virt.hardware [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1534.882057] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56808366-b361-4003-94e3-0ec96a9a5143 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.889999] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a17b45-7f17-460a-b049-4fbef88a31a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.040578] env[69927]: DEBUG nova.compute.manager [req-e9cfdfd9-5a0d-4fc2-8ee0-cbc69c84316a req-648cb890-a375-4f78-ae12-9953d484da8d service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Received event network-vif-plugged-4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1535.040825] env[69927]: DEBUG oslo_concurrency.lockutils [req-e9cfdfd9-5a0d-4fc2-8ee0-cbc69c84316a req-648cb890-a375-4f78-ae12-9953d484da8d service nova] Acquiring lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1535.041048] env[69927]: DEBUG oslo_concurrency.lockutils [req-e9cfdfd9-5a0d-4fc2-8ee0-cbc69c84316a req-648cb890-a375-4f78-ae12-9953d484da8d service nova] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1535.041231] env[69927]: DEBUG oslo_concurrency.lockutils [req-e9cfdfd9-5a0d-4fc2-8ee0-cbc69c84316a req-648cb890-a375-4f78-ae12-9953d484da8d service nova] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1535.041397] env[69927]: DEBUG nova.compute.manager [req-e9cfdfd9-5a0d-4fc2-8ee0-cbc69c84316a req-648cb890-a375-4f78-ae12-9953d484da8d service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] No waiting events found dispatching network-vif-plugged-4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1535.041561] env[69927]: WARNING nova.compute.manager 
[req-e9cfdfd9-5a0d-4fc2-8ee0-cbc69c84316a req-648cb890-a375-4f78-ae12-9953d484da8d service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Received unexpected event network-vif-plugged-4c7c3743-a31a-4f05-9380-5e133b888b77 for instance with vm_state building and task_state spawning. [ 1535.130787] env[69927]: DEBUG nova.network.neutron [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Successfully updated port: 4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1535.633840] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.633840] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1535.633840] env[69927]: DEBUG nova.network.neutron [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1536.166437] env[69927]: DEBUG nova.network.neutron [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1536.291189] env[69927]: DEBUG nova.network.neutron [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updating instance_info_cache with network_info: [{"id": "4c7c3743-a31a-4f05-9380-5e133b888b77", "address": "fa:16:3e:61:ca:ab", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c7c3743-a3", "ovs_interfaceid": "4c7c3743-a31a-4f05-9380-5e133b888b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.794592] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1536.794774] env[69927]: DEBUG nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Instance network_info: |[{"id": "4c7c3743-a31a-4f05-9380-5e133b888b77", "address": "fa:16:3e:61:ca:ab", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c7c3743-a3", "ovs_interfaceid": "4c7c3743-a31a-4f05-9380-5e133b888b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1536.795227] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:ca:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57691231-2b8d-4d71-8f79-d4a6a1d95ec8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c7c3743-a31a-4f05-9380-5e133b888b77', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1536.802676] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1536.802903] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1536.803148] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab907d91-b47e-4ce3-94d2-87336e6347d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.823732] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1536.823732] env[69927]: value = "task-4097134" [ 1536.823732] env[69927]: _type = "Task" [ 1536.823732] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.831791] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097134, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.069303] env[69927]: DEBUG nova.compute.manager [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Received event network-changed-4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1537.069537] env[69927]: DEBUG nova.compute.manager [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Refreshing instance network info cache due to event network-changed-4c7c3743-a31a-4f05-9380-5e133b888b77. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1537.069771] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] Acquiring lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.069918] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] Acquired lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1537.070192] env[69927]: DEBUG nova.network.neutron [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Refreshing network info cache for port 4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.333915] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097134, 'name': CreateVM_Task, 'duration_secs': 0.320085} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.334288] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1537.334720] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.334888] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1537.335226] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1537.335491] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3583d850-dfe2-4e61-ad91-26114a75e229 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.340416] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1537.340416] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529ecb06-4d04-cab7-eb71-393e17629f83" [ 1537.340416] env[69927]: _type = "Task" [ 1537.340416] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.347915] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529ecb06-4d04-cab7-eb71-393e17629f83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.715783] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1537.716012] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1537.773246] env[69927]: DEBUG nova.network.neutron [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updated VIF entry in instance network info cache for port 4c7c3743-a31a-4f05-9380-5e133b888b77. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1537.773657] env[69927]: DEBUG nova.network.neutron [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updating instance_info_cache with network_info: [{"id": "4c7c3743-a31a-4f05-9380-5e133b888b77", "address": "fa:16:3e:61:ca:ab", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c7c3743-a3", "ovs_interfaceid": "4c7c3743-a31a-4f05-9380-5e133b888b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.851526] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529ecb06-4d04-cab7-eb71-393e17629f83, 'name': SearchDatastore_Task, 'duration_secs': 0.010498} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.851773] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1537.852010] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1537.852255] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.852401] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1537.852612] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1537.852868] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2d5a4f3-18c0-4e76-a933-750539603752 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.861745] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1537.861922] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1537.862669] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e7568fb-b73e-491b-8adf-e2f2c60198ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.868344] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1537.868344] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524532f0-3e42-d7ea-519b-ce23976f24ae" [ 1537.868344] env[69927]: _type = "Task" [ 1537.868344] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.876263] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524532f0-3e42-d7ea-519b-ce23976f24ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.219020] env[69927]: DEBUG nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Starting instance... {{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1538.276431] env[69927]: DEBUG oslo_concurrency.lockutils [req-1b6e9e6e-2bab-4ee9-9b5c-0e8c36753c77 req-2f14e507-c29b-4fd1-ae2a-423138b32492 service nova] Releasing lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1538.378595] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]524532f0-3e42-d7ea-519b-ce23976f24ae, 'name': SearchDatastore_Task, 'duration_secs': 0.00895} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.379400] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63b49080-5f2c-494f-921a-e15de4295fd8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.384723] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1538.384723] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52960c20-d374-72ff-fdcc-b7cba6fd55c2" [ 1538.384723] env[69927]: _type = "Task" [ 1538.384723] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.392406] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52960c20-d374-72ff-fdcc-b7cba6fd55c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.744196] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1538.744483] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1538.746712] env[69927]: INFO nova.compute.claims [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1538.896071] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52960c20-d374-72ff-fdcc-b7cba6fd55c2, 'name': SearchDatastore_Task, 'duration_secs': 0.00959} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.896269] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1538.896550] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62/b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1538.896805] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbb5ecca-6fa5-406d-82de-45f37ef53092 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.903678] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1538.903678] env[69927]: value = "task-4097135" [ 1538.903678] env[69927]: _type = "Task" [ 1538.903678] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.911755] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.413988] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488355} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.414377] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62/b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1539.414483] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1539.414664] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11aa43d9-ebe3-4cf0-a551-e3c34d145e88 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.421766] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1539.421766] env[69927]: value = "task-4097136" [ 1539.421766] env[69927]: _type = "Task" [ 1539.421766] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.429462] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097136, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.812696] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f598dbc9-4320-4e08-b045-dd0c9b9ffa96 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.820498] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4a83bc-914a-4307-b0b1-3fed88387a63 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.850071] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2739bb1-fe43-485f-b68c-c9c4428d1ef9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.857561] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689b5939-b1ba-45dc-b1fc-1c6cb6495510 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.871735] env[69927]: DEBUG nova.compute.provider_tree [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1539.931138] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097136, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080676} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.931415] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1539.932203] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384db1b1-181c-42ba-a565-81737e713a51 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.954392] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62/b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1539.954672] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7303c842-dac8-40f0-b244-68c6b9289836 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.974424] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1539.974424] env[69927]: value = "task-4097137" [ 1539.974424] env[69927]: _type = "Task" [ 1539.974424] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.982283] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097137, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.374903] env[69927]: DEBUG nova.scheduler.client.report [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1540.484152] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097137, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.879716] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.135s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1540.880134] env[69927]: DEBUG nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1540.985077] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097137, 'name': ReconfigVM_Task, 'duration_secs': 0.777627} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.985077] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Reconfigured VM instance instance-0000007e to attach disk [datastore2] b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62/b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1540.985663] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-adb990ea-49b2-425f-b097-a69160b9740a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.993129] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1540.993129] env[69927]: value = "task-4097138" [ 1540.993129] env[69927]: _type = "Task" [ 1540.993129] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.002330] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097138, 'name': Rename_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.385285] env[69927]: DEBUG nova.compute.utils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1541.386732] env[69927]: DEBUG nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Allocating IP information in the background. 
{{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1541.386930] env[69927]: DEBUG nova.network.neutron [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] allocate_for_instance() {{(pid=69927) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1541.427328] env[69927]: DEBUG nova.policy [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef08164611894c289d4c30194d91526a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0823381b9f644adf818b490c551f5a3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69927) authorize /opt/stack/nova/nova/policy.py:192}} [ 1541.502636] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097138, 'name': Rename_Task, 'duration_secs': 0.155111} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.502960] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1541.503283] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c14da605-182a-408d-a059-e38bb0c7ea10 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.510057] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1541.510057] env[69927]: value = "task-4097139" [ 1541.510057] env[69927]: _type = "Task" [ 1541.510057] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.517781] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097139, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.702085] env[69927]: DEBUG nova.network.neutron [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Successfully created port: e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1541.890266] env[69927]: DEBUG nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1542.020198] env[69927]: DEBUG oslo_vmware.api [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097139, 'name': PowerOnVM_Task, 'duration_secs': 0.48684} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.020490] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1542.020696] env[69927]: INFO nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Took 7.17 seconds to spawn the instance on the hypervisor. 
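The sequence above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and finally PowerOnVM_Task, each followed by a "Waiting for the task" entry and "_poll_task ... progress is N%" polling lines) is the usual oslo.vmware invoke-and-poll pattern: the vSphere call returns a Task managed object immediately and the API session polls it until it reports success or error. A minimal sketch of that pattern, with a placeholder host, credentials and an already-resolved vm_ref (none of these values come from this log, and this is not Nova's own vm_util code):

from oslo_vmware import api


def power_on_vm(vcenter_host, username, password, vm_ref):
    # 10 retries on transient faults, poll the task every 0.5 seconds;
    # these knobs drive the "progress is N%" polling lines seen above.
    session = api.VMwareAPISession(vcenter_host, username, password, 10, 0.5)
    try:
        # The vSphere API returns a Task moref right away; the power-on
        # itself happens asynchronously on the vCenter side.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Blocks until the task reaches 'success', raising on 'error'.
        return session.wait_for_task(task)
    finally:
        session.logout()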
[ 1542.020879] env[69927]: DEBUG nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1542.021634] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f40eb2-e1e1-44a0-bded-33efdbb145e6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.398677] env[69927]: INFO nova.virt.block_device [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Booting with volume cf0f635f-6b34-45ac-933b-368bae0dcbd3 at /dev/sda [ 1542.436075] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bbe1964-e48b-43e1-aa0c-75ef54bd5150 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.446715] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa973942-bba6-4d1f-8fb3-d3bcf832c1e8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.473852] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22791cf2-556a-4734-880c-fc3bab314809 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.489082] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971bd217-202f-4ff0-bd00-4ecd875e4921 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.517768] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3e97d5-f42e-49b0-b90d-82fa2a951342 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.524879] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f9f951-80d2-4bbc-9ea5-bb2484447c1e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.539941] env[69927]: DEBUG nova.virt.block_device [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating existing volume attachment record: b675f7c6-0c28-4fc2-b1b4-e2c8a4ad3f45 {{(pid=69927) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1542.542896] env[69927]: INFO nova.compute.manager [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Took 11.86 seconds to build instance. 
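The 'Acquiring lock "..." by "..."' / 'acquired ... waited' / '"released" ... held' DEBUG lines that bracket the build (the per-instance _locked_do_build_and_run_instance lock, the "compute_resources" claim lock, and the "refresh_cache-<uuid>" and "<uuid>-events" locks) all come from oslo.concurrency's lockutils. A minimal sketch of the two usages visible here, with illustrative function names that are not Nova's own:

from oslo_concurrency import lockutils


# In-process lock named "compute_resources", matching the
# ResourceTracker.instance_claim lines earlier in the log; lockutils
# logs how long the caller waited and, on release, how long it was held.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    print('claiming resources for %s' % instance_uuid)


# The same facility as a context manager, e.g. for the per-instance
# "refresh_cache-<uuid>" locks used while rebuilding the network info cache.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('rebuilding network info cache for %s' % instance_uuid)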
[ 1543.011274] env[69927]: DEBUG nova.compute.manager [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Received event network-changed-4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1543.011583] env[69927]: DEBUG nova.compute.manager [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Refreshing instance network info cache due to event network-changed-4c7c3743-a31a-4f05-9380-5e133b888b77. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1543.011894] env[69927]: DEBUG oslo_concurrency.lockutils [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] Acquiring lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.012300] env[69927]: DEBUG oslo_concurrency.lockutils [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] Acquired lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.013085] env[69927]: DEBUG nova.network.neutron [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Refreshing network info cache for port 4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1543.046558] env[69927]: DEBUG oslo_concurrency.lockutils [None req-b202a276-2835-442b-b4fe-79c88cc2641d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.373s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1543.095265] env[69927]: DEBUG nova.compute.manager [req-b674e565-763b-499b-95b9-716b86a00f3d req-6d1a8af0-5bdc-444c-b569-2f9c8451d07a service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Received event network-vif-plugged-e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1543.095265] env[69927]: DEBUG oslo_concurrency.lockutils [req-b674e565-763b-499b-95b9-716b86a00f3d req-6d1a8af0-5bdc-444c-b569-2f9c8451d07a service nova] Acquiring lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1543.095265] env[69927]: DEBUG oslo_concurrency.lockutils [req-b674e565-763b-499b-95b9-716b86a00f3d req-6d1a8af0-5bdc-444c-b569-2f9c8451d07a service nova] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1543.095265] env[69927]: DEBUG oslo_concurrency.lockutils [req-b674e565-763b-499b-95b9-716b86a00f3d req-6d1a8af0-5bdc-444c-b569-2f9c8451d07a 
service nova] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1543.095265] env[69927]: DEBUG nova.compute.manager [req-b674e565-763b-499b-95b9-716b86a00f3d req-6d1a8af0-5bdc-444c-b569-2f9c8451d07a service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] No waiting events found dispatching network-vif-plugged-e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1543.095265] env[69927]: WARNING nova.compute.manager [req-b674e565-763b-499b-95b9-716b86a00f3d req-6d1a8af0-5bdc-444c-b569-2f9c8451d07a service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Received unexpected event network-vif-plugged-e8e1098c-fb9a-4eef-8751-2a7e610d3b71 for instance with vm_state building and task_state block_device_mapping. [ 1543.184082] env[69927]: DEBUG nova.network.neutron [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Successfully updated port: e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1543.688937] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.689224] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.689271] env[69927]: DEBUG nova.network.neutron [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1543.727404] env[69927]: DEBUG nova.network.neutron [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updated VIF entry in instance network info cache for port 4c7c3743-a31a-4f05-9380-5e133b888b77. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1543.727757] env[69927]: DEBUG nova.network.neutron [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updating instance_info_cache with network_info: [{"id": "4c7c3743-a31a-4f05-9380-5e133b888b77", "address": "fa:16:3e:61:ca:ab", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c7c3743-a3", "ovs_interfaceid": "4c7c3743-a31a-4f05-9380-5e133b888b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.229089] env[69927]: DEBUG nova.network.neutron [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1544.231493] env[69927]: DEBUG oslo_concurrency.lockutils [req-8f71afd1-970f-4c8b-9f59-17a5726f8761 req-9a64ec6d-5524-4a03-a064-fe01f5357949 service nova] Releasing lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.371776] env[69927]: DEBUG nova.network.neutron [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [{"id": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "address": "fa:16:3e:2b:6a:95", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8e1098c-fb", "ovs_interfaceid": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.640959] env[69927]: DEBUG nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1544.641484] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1544.641737] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.641922] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1544.642113] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.642260] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1544.642405] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1544.642657] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1544.642846] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1544.643033] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1544.643204] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1544.643376] env[69927]: DEBUG nova.virt.hardware [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1544.644290] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d2f31a-5819-4dc5-aedd-966ed3e34080 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.653218] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e09a245-2720-4978-ada6-6f8be8ae18a9 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.874554] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.874889] env[69927]: DEBUG nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Instance network_info: |[{"id": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "address": "fa:16:3e:2b:6a:95", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8e1098c-fb", "ovs_interfaceid": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1544.875489] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:2b:6a:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8e1098c-fb9a-4eef-8751-2a7e610d3b71', 'vif_model': 'vmxnet3'}] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1544.883555] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1544.883774] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1544.883999] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7029c9d9-89c3-4f49-a4b1-0f5d5f22f5b0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.904330] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1544.904330] env[69927]: value = "task-4097140" [ 1544.904330] env[69927]: _type = "Task" [ 1544.904330] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.915205] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097140, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.129160] env[69927]: DEBUG nova.compute.manager [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Received event network-changed-e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1545.129390] env[69927]: DEBUG nova.compute.manager [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Refreshing instance network info cache due to event network-changed-e8e1098c-fb9a-4eef-8751-2a7e610d3b71. 
{{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1545.129634] env[69927]: DEBUG oslo_concurrency.lockutils [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] Acquiring lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.129745] env[69927]: DEBUG oslo_concurrency.lockutils [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] Acquired lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1545.129906] env[69927]: DEBUG nova.network.neutron [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Refreshing network info cache for port e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1545.415763] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097140, 'name': CreateVM_Task} progress is 25%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.828661] env[69927]: DEBUG nova.network.neutron [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updated VIF entry in instance network info cache for port e8e1098c-fb9a-4eef-8751-2a7e610d3b71. {{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1545.829045] env[69927]: DEBUG nova.network.neutron [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [{"id": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "address": "fa:16:3e:2b:6a:95", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8e1098c-fb", "ovs_interfaceid": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.915681] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097140, 'name': CreateVM_Task, 'duration_secs': 0.647198} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.916045] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1545.916541] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'b675f7c6-0c28-4fc2-b1b4-e2c8a4ad3f45', 'delete_on_termination': True, 'device_type': None, 'boot_index': 0, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811627', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'name': 'volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'serial': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3'}, 'mount_device': '/dev/sda', 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69927) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1545.916752] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Root volume attach. Driver type: vmdk {{(pid=69927) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1545.917679] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a66fcb-e0dd-4ce6-af0a-c19eef4ed228 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.925305] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a62d855-f5e5-452e-8b3d-f166a4ba93a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.930967] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bc489c-89ff-4bc0-b0e5-5508f28d52d4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.937454] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-e4fb8078-fd2e-4eaa-9840-370de5ad6ea7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.944409] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1545.944409] env[69927]: value = "task-4097141" [ 1545.944409] env[69927]: _type = "Task" [ 1545.944409] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.952468] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097141, 'name': RelocateVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.331823] env[69927]: DEBUG oslo_concurrency.lockutils [req-ef00ce4b-816c-4ace-89a2-4a8f4e0b1ae1 req-f6826622-6d74-4294-bc22-94d71e80221f service nova] Releasing lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1546.454833] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097141, 'name': RelocateVM_Task, 'duration_secs': 0.411233} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.455117] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Volume attach. Driver type: vmdk {{(pid=69927) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1546.455326] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811627', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'name': 'volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'serial': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1546.456111] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b4e251-9695-4a29-b15a-3ce2e1886b5c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.472640] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69da2639-9ed2-41d5-b2f6-57e9e226e9c2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.495134] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3/volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 
1546.495415] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58f91e6c-ddc3-41a0-9890-9c4a92030ffc {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.515779] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1546.515779] env[69927]: value = "task-4097142" [ 1546.515779] env[69927]: _type = "Task" [ 1546.515779] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.527213] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097142, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.025772] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097142, 'name': ReconfigVM_Task, 'duration_secs': 0.266171} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.026144] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfigured VM instance instance-0000007f to attach disk [datastore2] volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3/volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1547.030952] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfe624cf-4638-4221-ab4b-5ad7160c9e50 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.047263] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1547.047263] env[69927]: value = "task-4097143" [ 1547.047263] env[69927]: _type = "Task" [ 1547.047263] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.056785] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097143, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.557884] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097143, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.058876] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097143, 'name': ReconfigVM_Task, 'duration_secs': 0.807699} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.059385] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811627', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'name': 'volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'serial': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3'} {{(pid=69927) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1548.059950] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-737b0c4c-4566-41ad-835f-0bc736aac786 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.066099] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1548.066099] env[69927]: value = "task-4097144" [ 1548.066099] env[69927]: _type = "Task" [ 1548.066099] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.076011] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097144, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.576444] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097144, 'name': Rename_Task, 'duration_secs': 0.126548} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.576677] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1548.576918] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d92109e-5f58-462a-a3fc-d973230ee57c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.583120] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1548.583120] env[69927]: value = "task-4097145" [ 1548.583120] env[69927]: _type = "Task" [ 1548.583120] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.590357] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097145, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.093121] env[69927]: DEBUG oslo_vmware.api [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097145, 'name': PowerOnVM_Task, 'duration_secs': 0.462727} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.093574] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1549.093632] env[69927]: INFO nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Took 4.45 seconds to spawn the instance on the hypervisor. [ 1549.093806] env[69927]: DEBUG nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1549.094575] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b122a33-8a4b-43ac-a07a-556da045f506 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.612921] env[69927]: INFO nova.compute.manager [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Took 10.89 seconds to build instance. 
[ 1550.114738] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c96df9d-0556-4acf-a841-8d8562b93fea tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.399s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1550.193682] env[69927]: DEBUG nova.compute.manager [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Received event network-changed-cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1550.193904] env[69927]: DEBUG nova.compute.manager [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Refreshing instance network info cache due to event network-changed-cefb857a-0190-4924-a605-749a4858cef2. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1550.194144] env[69927]: DEBUG oslo_concurrency.lockutils [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] Acquiring lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.194287] env[69927]: DEBUG oslo_concurrency.lockutils [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] Acquired lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1550.194446] env[69927]: DEBUG nova.network.neutron [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Refreshing network info cache for port cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1550.877366] env[69927]: DEBUG nova.network.neutron [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updated VIF entry in instance network info cache for port cefb857a-0190-4924-a605-749a4858cef2. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1550.877874] env[69927]: DEBUG nova.network.neutron [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updating instance_info_cache with network_info: [{"id": "cefb857a-0190-4924-a605-749a4858cef2", "address": "fa:16:3e:8f:db:3a", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcefb857a-01", "ovs_interfaceid": "cefb857a-0190-4924-a605-749a4858cef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.380623] env[69927]: DEBUG oslo_concurrency.lockutils [req-02429752-6ca4-45e4-b8a9-83302c32e168 req-2a5f51df-d107-4e38-a12f-ec40858c9a2c service nova] Releasing lock "refresh_cache-ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1551.634614] env[69927]: DEBUG nova.compute.manager [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Stashing vm_state: active {{(pid=69927) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1552.153626] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1552.153913] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1552.220269] env[69927]: DEBUG nova.compute.manager [req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Received event network-changed-e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1552.220472] env[69927]: DEBUG nova.compute.manager 
[req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Refreshing instance network info cache due to event network-changed-e8e1098c-fb9a-4eef-8751-2a7e610d3b71. {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1552.220787] env[69927]: DEBUG oslo_concurrency.lockutils [req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] Acquiring lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.220859] env[69927]: DEBUG oslo_concurrency.lockutils [req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] Acquired lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1552.221162] env[69927]: DEBUG nova.network.neutron [req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Refreshing network info cache for port e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1552.659494] env[69927]: INFO nova.compute.claims [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1552.942343] env[69927]: DEBUG nova.network.neutron [req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updated VIF entry in instance network info cache for port e8e1098c-fb9a-4eef-8751-2a7e610d3b71. 
{{(pid=69927) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1552.942752] env[69927]: DEBUG nova.network.neutron [req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [{"id": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "address": "fa:16:3e:2b:6a:95", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8e1098c-fb", "ovs_interfaceid": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.166204] env[69927]: INFO nova.compute.resource_tracker [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating resource usage from migration a8b1eea2-f418-4f1d-8dae-808d7edde40b [ 1553.236373] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09df0b98-0732-4b8a-bc4e-b2b77fb6f065 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.244203] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8586e779-1664-4d00-afce-3bcfba044518 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.274518] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c64a85-d5c1-4bc9-8838-59ef3a7f8e8d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.282206] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a6653f-eec5-4315-b84d-cc7dfa5a4802 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.295384] env[69927]: DEBUG nova.compute.provider_tree [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1553.446026] env[69927]: DEBUG 
oslo_concurrency.lockutils [req-818b460f-793f-4805-8263-39a19d7dbf74 req-a740379b-60d6-46c3-bac1-423cf7c875de service nova] Releasing lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1553.798814] env[69927]: DEBUG nova.scheduler.client.report [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1554.303760] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.150s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1554.303976] env[69927]: INFO nova.compute.manager [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Migrating [ 1554.819574] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.819974] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1554.819974] env[69927]: DEBUG nova.network.neutron [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1555.534846] env[69927]: DEBUG nova.network.neutron [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [{"id": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "address": "fa:16:3e:2b:6a:95", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8e1098c-fb", "ovs_interfaceid": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.038470] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1557.553415] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02556a3a-4dfb-4310-bf41-05b92e1b7bda {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.572691] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8' progress to 0 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1558.079190] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1558.079511] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f72e86aa-9a8e-49ff-8e59-b4b0020cef5f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.087732] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1558.087732] env[69927]: value = "task-4097146" [ 1558.087732] env[69927]: _type = "Task" [ 1558.087732] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.099100] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097146, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.599823] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097146, 'name': PowerOffVM_Task, 'duration_secs': 0.211333} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.600229] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1558.600353] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8' progress to 17 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1559.106975] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:35:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1559.107294] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1559.107376] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1559.107577] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1559.107728] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1559.107873] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 
tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1559.108097] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1559.108264] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1559.108431] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1559.108592] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1559.108767] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1559.113807] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a285581f-b53a-4564-8849-24db17a4d786 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.133369] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1559.133369] env[69927]: value = "task-4097147" [ 1559.133369] env[69927]: _type = "Task" [ 1559.133369] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.142727] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097147, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.648293] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097147, 'name': ReconfigVM_Task, 'duration_secs': 0.219667} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.648719] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8' progress to 33 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1560.157468] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1560.157802] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1560.158105] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1560.158448] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1560.158679] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1560.158887] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1560.159243] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1560.159525] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 
tempest-ServerActionsTestOtherA-296658517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1560.159783] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1560.159986] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1560.160240] env[69927]: DEBUG nova.virt.hardware [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1560.167378] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1560.167813] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f58eeaf-1e3a-4ab7-bd0d-8558aee941c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.187547] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1560.187547] env[69927]: value = "task-4097148" [ 1560.187547] env[69927]: _type = "Task" [ 1560.187547] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.196099] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.697855] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097148, 'name': ReconfigVM_Task, 'duration_secs': 0.241674} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.698244] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1560.698969] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594bbf77-c595-4ee5-bb4e-9e3c84ea5ce4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.721531] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3/volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1560.722253] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ca154f5-f0ac-4312-ab16-28095124d12f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.740789] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1560.740789] env[69927]: value = "task-4097149" [ 1560.740789] env[69927]: _type = "Task" [ 1560.740789] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.749152] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097149, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.251232] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097149, 'name': ReconfigVM_Task, 'duration_secs': 0.29479} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.251512] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfigured VM instance instance-0000007f to attach disk [datastore2] volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3/volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3.vmdk or device None with type thin {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1561.251782] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8' progress to 50 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1561.758323] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3244d24e-7ec9-47de-908f-a22d63f7a74f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.777384] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d950791f-30b7-405a-8423-a4f123d4acca {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.796386] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8' progress to 67 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1563.520303] env[69927]: DEBUG nova.network.neutron [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Port e8e1098c-fb9a-4eef-8751-2a7e610d3b71 binding to destination host cpu-1 is already ACTIVE {{(pid=69927) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1564.542151] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1564.542527] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1564.542719] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 
tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1565.581073] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.581387] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1565.581443] env[69927]: DEBUG nova.network.neutron [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1566.284927] env[69927]: DEBUG nova.network.neutron [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [{"id": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "address": "fa:16:3e:2b:6a:95", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8e1098c-fb", "ovs_interfaceid": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.787831] env[69927]: DEBUG oslo_concurrency.lockutils [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1567.297648] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1d1840-eb9c-4d41-936c-5bc8dbab2287 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.306921] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a8fda3-6f58-4258-b029-27c6a3e2f649 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.406501] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1cffbe-bdb4-40d1-a416-1ad718666378 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.425643] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96f5f01-847f-4df0-969a-90fcfd1c2fc2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.432604] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8' progress to 83 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1568.938699] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1568.939256] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1338190-ed74-4734-9915-ae89eec3db7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.948063] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1568.948063] env[69927]: value = "task-4097150" [ 1568.948063] env[69927]: _type = "Task" [ 1568.948063] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.957322] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.461929] env[69927]: DEBUG oslo_vmware.api [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097150, 'name': PowerOnVM_Task, 'duration_secs': 0.386016} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.462385] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1569.462605] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-83125b25-c6f3-4f8a-abe6-c47b331f87dc tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8' progress to 100 {{(pid=69927) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1572.106683] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1572.107126] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1572.107189] env[69927]: DEBUG nova.compute.manager [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Going to confirm migration 10 {{(pid=69927) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1572.679026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.679228] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquired lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1572.679410] env[69927]: DEBUG nova.network.neutron [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1572.679596] env[69927]: DEBUG nova.objects.instance [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'info_cache' on Instance uuid 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8 
{{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1573.915235] env[69927]: DEBUG nova.network.neutron [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [{"id": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "address": "fa:16:3e:2b:6a:95", "network": {"id": "1b67ec75-5b1a-4408-976d-0bd585378451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1628882259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0823381b9f644adf818b490c551f5a3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8e1098c-fb", "ovs_interfaceid": "e8e1098c-fb9a-4eef-8751-2a7e610d3b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.418859] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Releasing lock "refresh_cache-5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1574.418859] env[69927]: DEBUG nova.objects.instance [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'migration_context' on Instance uuid 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1574.632385] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.632645] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.632781] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.632970] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes 
{{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.633180] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.633248] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.633398] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.633538] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1574.633680] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.922684] env[69927]: DEBUG nova.objects.base [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Object Instance<5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8> lazy-loaded attributes: info_cache,migration_context {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1574.923679] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da3ce97-52a8-4264-842c-543ff8064342 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.944699] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-112ddaa7-63f3-4b60-b6f4-cd359fec5673 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.951790] env[69927]: DEBUG oslo_vmware.api [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1574.951790] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ad5eb9-47b1-dd8f-7dde-7dc130241fb2" [ 1574.951790] env[69927]: _type = "Task" [ 1574.951790] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.961381] env[69927]: DEBUG oslo_vmware.api [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ad5eb9-47b1-dd8f-7dde-7dc130241fb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.136749] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1575.137031] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.137184] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1575.137380] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1575.138240] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5968f9f7-4618-445c-ae4b-0c91bc70b9fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.146830] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5541e05-bedf-40ec-abd1-50291a50e839 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.162945] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc28107-77af-4039-8e5f-e074c1e02fce {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.170405] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf82ac3-201c-4d04-9a2d-5b12c1a30a07 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.199141] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180251MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1575.199306] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1575.199503] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.462259] env[69927]: DEBUG oslo_vmware.api [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52ad5eb9-47b1-dd8f-7dde-7dc130241fb2, 'name': SearchDatastore_Task, 'duration_secs': 0.010408} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.462554] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1576.207653] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Applying migration context for instance 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8 as it has an incoming, in-progress migration a8b1eea2-f418-4f1d-8dae-808d7edde40b. Migration status is confirming {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1576.208502] env[69927]: INFO nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating resource usage from migration a8b1eea2-f418-4f1d-8dae-808d7edde40b [ 1576.227943] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1576.228116] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance eb05a071-88d4-4e51-936f-d5b7554ac204 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1576.228243] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1576.228367] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Migration a8b1eea2-f418-4f1d-8dae-808d7edde40b is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 1576.228487] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Instance 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69927) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1576.228672] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1576.228812] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1576.247863] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Refreshing inventories for resource provider 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1576.261394] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating ProviderTree inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1576.261585] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Updating inventory in ProviderTree for provider 2f529b36-df5f-4b37-8103-68f74f737726 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1576.273227] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Refreshing aggregate associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, aggregates: None {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1576.292244] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Refreshing trait associations for resource provider 2f529b36-df5f-4b37-8103-68f74f737726, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64 {{(pid=69927) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1576.363568] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b83da0-612b-463e-9668-ea8d1193388f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.372124] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f20d3a71-fa0a-46f6-a025-b9cbab8e17ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.403829] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd87192-acfd-41fe-afaf-6f1364bb0c90 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.412839] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84eb8ba-2910-43c9-9b13-064b7a3b7863 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.426483] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.933657] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1577.439640] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1577.439906] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.240s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1577.440152] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.978s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1578.009055] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187de3c9-3e94-49f2-aed4-6beffba579a1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.016898] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ff4db5-b8db-4e74-85d6-850886ef0b29 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.046884] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee6d025-4f84-4f89-b477-8108648308eb {{(pid=69927) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.054851] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19254ba9-c713-4919-9efc-8ace8bbeb3ed {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.069295] env[69927]: DEBUG nova.compute.provider_tree [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.311343] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.573101] env[69927]: DEBUG nova.scheduler.client.report [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1579.584210] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.144s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1580.114931] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1580.115256] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1580.115438] env[69927]: DEBUG nova.compute.manager [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1580.116378] env[69927]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640e5142-1b60-4a90-8038-43fd85290af3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.123575] env[69927]: DEBUG nova.compute.manager [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69927) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1580.124234] env[69927]: DEBUG nova.objects.instance [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'flavor' on Instance uuid b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1580.147637] env[69927]: INFO nova.scheduler.client.report [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted allocation for migration a8b1eea2-f418-4f1d-8dae-808d7edde40b [ 1580.652976] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8021b5c6-07ca-473a-a5b8-d01ee8c78b28 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.546s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1580.685048] env[69927]: INFO nova.compute.manager [None req-cab8843c-2caa-4a6d-9a0c-d60ea1fec019 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Get console output [ 1580.685264] env[69927]: WARNING nova.virt.vmwareapi.driver [None req-cab8843c-2caa-4a6d-9a0c-d60ea1fec019 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] The console log is missing. Check your VSPC configuration [ 1581.131608] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1581.131934] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b58fd1e6-d785-4a36-bca6-3e7b2d6a4963 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.139547] env[69927]: DEBUG oslo_vmware.api [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1581.139547] env[69927]: value = "task-4097151" [ 1581.139547] env[69927]: _type = "Task" [ 1581.139547] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.150812] env[69927]: DEBUG oslo_vmware.api [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097151, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.649864] env[69927]: DEBUG oslo_vmware.api [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097151, 'name': PowerOffVM_Task, 'duration_secs': 0.18512} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.650158] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1581.650363] env[69927]: DEBUG nova.compute.manager [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1581.651183] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f54a9a0-8b09-4413-a617-17362245c68a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.162710] env[69927]: DEBUG oslo_concurrency.lockutils [None req-fa7a2205-6f05-41d2-b024-494b5ba2ae85 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1582.471436] env[69927]: DEBUG nova.objects.instance [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'flavor' on Instance uuid b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1582.976671] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.976877] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1582.977051] env[69927]: DEBUG nova.network.neutron [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 
tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1582.977241] env[69927]: DEBUG nova.objects.instance [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'info_cache' on Instance uuid b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1583.481536] env[69927]: DEBUG nova.objects.base [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69927) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1584.203334] env[69927]: DEBUG nova.network.neutron [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updating instance_info_cache with network_info: [{"id": "4c7c3743-a31a-4f05-9380-5e133b888b77", "address": "fa:16:3e:61:ca:ab", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c7c3743-a3", "ovs_interfaceid": "4c7c3743-a31a-4f05-9380-5e133b888b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.705716] env[69927]: DEBUG oslo_concurrency.lockutils [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1585.711925] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1585.712329] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45656513-932f-45ad-8426-0ac7e95dfdf4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1585.721193] env[69927]: DEBUG oslo_vmware.api [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1585.721193] env[69927]: value = "task-4097152" [ 1585.721193] env[69927]: _type = "Task" [ 1585.721193] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.729577] env[69927]: DEBUG oslo_vmware.api [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.232296] env[69927]: DEBUG oslo_vmware.api [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097152, 'name': PowerOnVM_Task, 'duration_secs': 0.364787} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.232583] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1586.232731] env[69927]: DEBUG nova.compute.manager [None req-c7bcb346-d628-445e-812a-6ebfb52889e5 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1586.234531] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c700743f-21bf-403a-b029-931c9fbafff7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.230465] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6c1bbe-c8c0-46be-b306-99ed937fc0ff {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.266504] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-02e44a85-f6ec-4a33-9bc1-1ab7009de96d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Suspending the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1587.266504] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e633eb8f-3080-4f28-b873-075377ccd7bf {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.266504] env[69927]: DEBUG oslo_vmware.api [None req-02e44a85-f6ec-4a33-9bc1-1ab7009de96d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1587.266504] env[69927]: value = "task-4097153" [ 1587.266504] env[69927]: _type = "Task" [ 1587.266504] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.266504] env[69927]: DEBUG oslo_vmware.api [None req-02e44a85-f6ec-4a33-9bc1-1ab7009de96d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097153, 'name': SuspendVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.759110] env[69927]: DEBUG oslo_vmware.api [None req-02e44a85-f6ec-4a33-9bc1-1ab7009de96d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097153, 'name': SuspendVM_Task} progress is 66%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.259355] env[69927]: DEBUG oslo_vmware.api [None req-02e44a85-f6ec-4a33-9bc1-1ab7009de96d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097153, 'name': SuspendVM_Task, 'duration_secs': 0.615011} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.259740] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-02e44a85-f6ec-4a33-9bc1-1ab7009de96d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Suspended the VM {{(pid=69927) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1588.259795] env[69927]: DEBUG nova.compute.manager [None req-02e44a85-f6ec-4a33-9bc1-1ab7009de96d tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1588.260577] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e01257-d1c0-421d-a0c3-214d56b88470 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.580065] env[69927]: INFO nova.compute.manager [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Resuming [ 1589.580613] env[69927]: DEBUG nova.objects.instance [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'flavor' on Instance uuid b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1591.091517] env[69927]: DEBUG oslo_concurrency.lockutils [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.091777] env[69927]: DEBUG oslo_concurrency.lockutils [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquired lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
1591.091999] env[69927]: DEBUG nova.network.neutron [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1591.797347] env[69927]: DEBUG nova.network.neutron [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updating instance_info_cache with network_info: [{"id": "4c7c3743-a31a-4f05-9380-5e133b888b77", "address": "fa:16:3e:61:ca:ab", "network": {"id": "3080c441-c872-4434-9a60-24f6c4b998e8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1799092483-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee421c661394f3fbf8d69a575f095a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57691231-2b8d-4d71-8f79-d4a6a1d95ec8", "external-id": "nsx-vlan-transportzone-373", "segmentation_id": 373, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c7c3743-a3", "ovs_interfaceid": "4c7c3743-a31a-4f05-9380-5e133b888b77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.300222] env[69927]: DEBUG oslo_concurrency.lockutils [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Releasing lock "refresh_cache-b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1592.301221] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a1d26b-6e98-4551-b1f1-955ad3316702 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.308494] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Resuming the VM {{(pid=69927) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1592.308729] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3983f82a-27a6-4393-a0a2-de6ba300e8d5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.315482] env[69927]: DEBUG oslo_vmware.api [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1592.315482] env[69927]: value = "task-4097154" 
[ 1592.315482] env[69927]: _type = "Task" [ 1592.315482] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.324127] env[69927]: DEBUG oslo_vmware.api [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097154, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.827376] env[69927]: DEBUG oslo_vmware.api [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097154, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.325982] env[69927]: DEBUG oslo_vmware.api [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097154, 'name': PowerOnVM_Task, 'duration_secs': 0.542441} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.326367] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Resumed the VM {{(pid=69927) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1593.326546] env[69927]: DEBUG nova.compute.manager [None req-82f3d54c-b6ac-4ef2-9784-99b924a6bd5e tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1593.327421] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0967d874-3e79-4b62-b694-989a9881403e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.269507] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1594.269897] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1594.270226] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1594.270509] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1594.270772] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1594.272952] env[69927]: INFO nova.compute.manager [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Terminating instance [ 1594.776781] env[69927]: DEBUG nova.compute.manager [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1594.777183] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1594.777942] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ceed445-5f94-4023-8f7b-48df700b0b7e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.785959] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1594.786168] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff81114f-c616-49c1-9fff-b582b6fc3d87 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.793182] env[69927]: DEBUG oslo_vmware.api [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1594.793182] env[69927]: value = "task-4097155" [ 1594.793182] env[69927]: _type = "Task" [ 1594.793182] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.802830] env[69927]: DEBUG oslo_vmware.api [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097155, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.305071] env[69927]: DEBUG oslo_vmware.api [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097155, 'name': PowerOffVM_Task, 'duration_secs': 0.236317} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.305373] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1595.305571] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1595.305836] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8224ccc2-072f-4024-ac0e-d846626c2379 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.378079] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1595.378079] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1595.378079] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleting the datastore file [datastore2] b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1595.378336] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc8b19e5-352c-4aa5-ad2a-54962e93c9a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.384879] env[69927]: DEBUG oslo_vmware.api [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for the task: (returnval){ [ 1595.384879] env[69927]: value = "task-4097157" [ 1595.384879] env[69927]: _type = "Task" [ 
1595.384879] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.393083] env[69927]: DEBUG oslo_vmware.api [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097157, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.894488] env[69927]: DEBUG oslo_vmware.api [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Task: {'id': task-4097157, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18966} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.894775] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1595.894950] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1595.895143] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1595.895321] env[69927]: INFO nova.compute.manager [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1595.895569] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1595.895755] env[69927]: DEBUG nova.compute.manager [-] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1595.895849] env[69927]: DEBUG nova.network.neutron [-] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1596.366502] env[69927]: DEBUG nova.compute.manager [req-7141f445-c4eb-4541-b237-7e6c8d3087a2 req-418cc5d7-8d08-418d-8694-6b2293191f37 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Received event network-vif-deleted-4c7c3743-a31a-4f05-9380-5e133b888b77 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1596.366694] env[69927]: INFO nova.compute.manager [req-7141f445-c4eb-4541-b237-7e6c8d3087a2 req-418cc5d7-8d08-418d-8694-6b2293191f37 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Neutron deleted interface 4c7c3743-a31a-4f05-9380-5e133b888b77; detaching it from the instance and deleting it from the info cache [ 1596.366866] env[69927]: DEBUG nova.network.neutron [req-7141f445-c4eb-4541-b237-7e6c8d3087a2 req-418cc5d7-8d08-418d-8694-6b2293191f37 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.846044] env[69927]: DEBUG nova.network.neutron [-] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.869209] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64f45968-a2d8-49ae-b061-70ef7ea79606 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.881710] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651123bf-5b9c-42bb-beb0-e97b798cef0e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.910919] env[69927]: DEBUG nova.compute.manager [req-7141f445-c4eb-4541-b237-7e6c8d3087a2 req-418cc5d7-8d08-418d-8694-6b2293191f37 service nova] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Detach interface failed, port_id=4c7c3743-a31a-4f05-9380-5e133b888b77, reason: Instance b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1597.349276] env[69927]: INFO nova.compute.manager [-] [instance: b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62] Took 1.45 seconds to deallocate network for instance. 
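Note on the terminate flow just above: PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task are all driven through oslo.vmware's wait_for_task/_poll_task loop, which is what produces the repeated "progress is N%" lines before each "completed successfully" entry. The following is a minimal, self-contained sketch of that polling pattern, under the assumption that a task reports its progress as a percentage; wait_for_completion, poll_progress and TaskTimedOut are hypothetical names for this sketch, not the oslo.vmware API.

import time

class TaskTimedOut(Exception):
    pass

def wait_for_completion(poll_progress, interval=0.5, timeout=60.0):
    # Poll until the task reports 100%, mirroring the
    # "progress is 0%" ... "completed successfully" sequence in the log.
    waited = 0.0
    while waited <= timeout:
        if poll_progress() >= 100:
            return
        time.sleep(interval)
        waited += interval
    raise TaskTimedOut("task did not complete within %.1fs" % timeout)

# Usage example: a fake task that reaches 100% on the third poll, much like
# task-4097153 above (0% -> 66% -> completed).
progress_samples = iter([0, 66, 100])
wait_for_completion(lambda: next(progress_samples), interval=0.01)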
[ 1597.856169] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1597.856452] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1597.856673] env[69927]: DEBUG nova.objects.instance [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lazy-loading 'resources' on Instance uuid b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1598.423768] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91680fb2-6560-4306-b787-28fe730fed64 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.432507] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2829de-0aff-4f88-a36d-7746f1008803 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.464673] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13831e1c-f7e5-4744-b022-56e4286baf7d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.472424] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a35871-cb42-43df-8f41-c5b9f28fc96f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.485769] env[69927]: DEBUG nova.compute.provider_tree [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1598.989512] env[69927]: DEBUG nova.scheduler.client.report [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1599.494946] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 
tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1599.523702] env[69927]: INFO nova.scheduler.client.report [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Deleted allocations for instance b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62 [ 1600.031982] env[69927]: DEBUG oslo_concurrency.lockutils [None req-ff1d880a-f1a6-4e9a-bfdb-8cb2440cf1f6 tempest-ServerActionsTestJSON-2022006223 tempest-ServerActionsTestJSON-2022006223-project-member] Lock "b0c9fac9-3ce0-4b58-bb4d-d05e274a5a62" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.762s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1603.609653] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "ac9403a1-23c9-40ca-adfb-90a926712dc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1603.609983] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "ac9403a1-23c9-40ca-adfb-90a926712dc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1604.112963] env[69927]: DEBUG nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Starting instance... 
{{(pid=69927) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1604.633991] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1604.634323] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1604.635736] env[69927]: INFO nova.compute.claims [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1605.700649] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df59db92-1f6e-49e3-bee6-b882f1feeff2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.708179] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f96be02-f17c-4c73-a479-c44fb05071c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.738183] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4665b76-1b83-4902-8b42-b2645737d1d2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.745574] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40511e21-c73a-44e6-9826-554f71e8ad6e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.759457] env[69927]: DEBUG nova.compute.provider_tree [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.971861] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1605.972104] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1605.972321] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1605.972503] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1605.972667] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1605.974820] env[69927]: INFO nova.compute.manager [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Terminating instance [ 1606.262936] env[69927]: DEBUG nova.scheduler.client.report [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1606.478398] env[69927]: DEBUG nova.compute.manager [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1606.478633] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1606.478967] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd917199-8e9d-493e-b8bf-6602df89f4e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.486492] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1606.486492] env[69927]: value = "task-4097158" [ 1606.486492] env[69927]: _type = "Task" [ 1606.486492] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.495639] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097158, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.768152] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.134s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1606.768681] env[69927]: DEBUG nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Start building networks asynchronously for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1606.996825] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097158, 'name': PowerOffVM_Task, 'duration_secs': 0.194311} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.997102] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1606.997307] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Volume detach. 
Driver type: vmdk {{(pid=69927) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1606.997506] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811627', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'name': 'volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8', 'attached_at': '2025-05-13T19:52:01.000000', 'detached_at': '', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'serial': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1606.998318] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd47dde0-1df3-4e2e-ac05-f2c93abcb550 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.017540] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c107f4-8de5-4cf7-a998-8df579b0afc8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.025898] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f83a90a-2fdf-46ff-a0f3-271b89ae5776 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.044868] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f87c214-e484-4557-8256-06cd56eebcba {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.061047] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] The volume has not been displaced from its original location: [datastore2] volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3/volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3.vmdk. No consolidation needed. 
{{(pid=69927) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1607.066562] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1607.066926] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ff0d145-ecb2-4b6b-9f0a-3d5ca9cc4605 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.086570] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1607.086570] env[69927]: value = "task-4097159" [ 1607.086570] env[69927]: _type = "Task" [ 1607.086570] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.096987] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097159, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.273916] env[69927]: DEBUG nova.compute.utils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Using /dev/sd instead of None {{(pid=69927) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1607.275379] env[69927]: DEBUG nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Not allocating networking since 'none' was specified. {{(pid=69927) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1607.597326] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097159, 'name': ReconfigVM_Task, 'duration_secs': 0.182306} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.597543] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=69927) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1607.602311] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbd68f10-da48-4ec5-b204-b86d42d8eeef {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.619607] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1607.619607] env[69927]: value = "task-4097160" [ 1607.619607] env[69927]: _type = "Task" [ 1607.619607] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.630527] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097160, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.777664] env[69927]: DEBUG nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Start building block device mappings for instance. {{(pid=69927) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1608.130105] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097160, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.630634] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097160, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.787206] env[69927]: DEBUG nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Start spawning the instance on the hypervisor. 
{{(pid=69927) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1608.813693] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1608.813939] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1608.814109] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1608.814311] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1608.814454] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1608.814599] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1608.814804] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1608.814968] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1608.815154] env[69927]: DEBUG nova.virt.hardware [None 
req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1608.815315] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1608.815492] env[69927]: DEBUG nova.virt.hardware [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1608.816346] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d5d462-0a67-452c-a0f5-f42f2ea444e5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.824167] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d0aeb2-7dcc-4100-bd6c-f4475178c3b2 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.838123] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1608.845022] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Creating folder: Project (e17a48df7f574087b7ea1ea677ea1e8f). Parent ref: group-v811283. {{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1608.845022] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cf0db11-ac0b-4ce3-9c29-e12769e1f96a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.855696] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Created folder: Project (e17a48df7f574087b7ea1ea677ea1e8f) in parent group-v811283. [ 1608.856112] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Creating folder: Instances. Parent ref: group-v811630. 
{{(pid=69927) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1608.856191] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85fb8fbb-b978-4ed6-856c-f045a3f62321 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.866287] env[69927]: INFO nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Created folder: Instances in parent group-v811630. [ 1608.867202] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1608.867202] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1608.867202] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39ae9163-4fd5-429e-8aa2-f83e81b3475f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.883679] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1608.883679] env[69927]: value = "task-4097163" [ 1608.883679] env[69927]: _type = "Task" [ 1608.883679] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.891236] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097163, 'name': CreateVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.130999] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097160, 'name': ReconfigVM_Task, 'duration_secs': 1.150519} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.131321] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-811627', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'name': 'volume-cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8', 'attached_at': '2025-05-13T19:52:01.000000', 'detached_at': '', 'volume_id': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3', 'serial': 'cf0f635f-6b34-45ac-933b-368bae0dcbd3'} {{(pid=69927) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1609.131609] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1609.132414] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49cc2f2-c394-4004-bf57-3db82909e7a0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.138951] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1609.139200] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ddd9d54-0e46-4782-afae-a8a356d61965 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.211907] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1609.212171] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1609.212368] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleting the datastore file [datastore2] 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1609.212644] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e8def49-9d73-461a-adc6-43e63ccbb9b2 {{(pid=69927) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.219286] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1609.219286] env[69927]: value = "task-4097165" [ 1609.219286] env[69927]: _type = "Task" [ 1609.219286] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.227233] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097165, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.393433] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097163, 'name': CreateVM_Task, 'duration_secs': 0.228265} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.393635] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1609.393976] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.394154] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1609.394509] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1609.394757] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-549f782d-5f3c-475d-a81e-3dbd1fac93e3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.399083] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1609.399083] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f70dc5-5f7d-0dc6-0ec5-ecc067537a98" [ 1609.399083] env[69927]: _type = "Task" [ 1609.399083] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.407622] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f70dc5-5f7d-0dc6-0ec5-ecc067537a98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.728962] env[69927]: DEBUG oslo_vmware.api [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097165, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080354} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.729221] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1609.729353] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1609.729530] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1609.729704] env[69927]: INFO nova.compute.manager [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Took 3.25 seconds to destroy the instance on the hypervisor. [ 1609.729945] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1609.730146] env[69927]: DEBUG nova.compute.manager [-] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1609.730241] env[69927]: DEBUG nova.network.neutron [-] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1609.908987] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52f70dc5-5f7d-0dc6-0ec5-ecc067537a98, 'name': SearchDatastore_Task, 'duration_secs': 0.00978} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.909337] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1609.909541] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.909778] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.909921] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1609.910111] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1609.910366] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ba3f6da-8314-4a87-a140-c18d2a86d91b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.919214] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Created directory with 
path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1609.920033] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1609.920130] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af72564a-7ba0-4107-8572-fb1e80ba131f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.925677] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1609.925677] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529e9fed-b3cf-4f53-f109-2f4a5ac43621" [ 1609.925677] env[69927]: _type = "Task" [ 1609.925677] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.933641] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529e9fed-b3cf-4f53-f109-2f4a5ac43621, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.207825] env[69927]: DEBUG nova.compute.manager [req-5854f381-855f-4c77-ae67-3ea405706171 req-26c9302a-ad63-4a6c-aded-e5d2cc4af9eb service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Received event network-vif-deleted-e8e1098c-fb9a-4eef-8751-2a7e610d3b71 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1610.207825] env[69927]: INFO nova.compute.manager [req-5854f381-855f-4c77-ae67-3ea405706171 req-26c9302a-ad63-4a6c-aded-e5d2cc4af9eb service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Neutron deleted interface e8e1098c-fb9a-4eef-8751-2a7e610d3b71; detaching it from the instance and deleting it from the info cache [ 1610.207825] env[69927]: DEBUG nova.network.neutron [req-5854f381-855f-4c77-ae67-3ea405706171 req-26c9302a-ad63-4a6c-aded-e5d2cc4af9eb service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.436507] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]529e9fed-b3cf-4f53-f109-2f4a5ac43621, 'name': SearchDatastore_Task, 'duration_secs': 0.009808} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.437443] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a274d17-b143-46fc-90a5-44e11064bb90 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.442662] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1610.442662] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5202f724-cb0c-6b2a-98c7-1290fe27a8dd" [ 1610.442662] env[69927]: _type = "Task" [ 1610.442662] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.451493] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5202f724-cb0c-6b2a-98c7-1290fe27a8dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.652108] env[69927]: DEBUG nova.network.neutron [-] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.711064] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b55a31d3-f607-43d8-a47b-8010c326eb0b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.724042] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d1aa1b-13b7-4363-9511-75da8d93b9b5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.757774] env[69927]: DEBUG nova.compute.manager [req-5854f381-855f-4c77-ae67-3ea405706171 req-26c9302a-ad63-4a6c-aded-e5d2cc4af9eb service nova] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Detach interface failed, port_id=e8e1098c-fb9a-4eef-8751-2a7e610d3b71, reason: Instance 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1610.952947] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5202f724-cb0c-6b2a-98c7-1290fe27a8dd, 'name': SearchDatastore_Task, 'duration_secs': 0.010191} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.953314] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1610.953515] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1610.953774] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28fdb533-95e1-4489-abde-074e4dfad60b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.960094] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1610.960094] env[69927]: value = "task-4097166" [ 1610.960094] env[69927]: _type = "Task" [ 1610.960094] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.969863] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097166, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.154933] env[69927]: INFO nova.compute.manager [-] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Took 1.42 seconds to deallocate network for instance. [ 1611.471073] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097166, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456611} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.471073] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1611.471073] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1611.471073] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffd2edc9-004d-4cb0-aee4-975391d8d5dd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.477723] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1611.477723] env[69927]: value = "task-4097167" [ 1611.477723] env[69927]: _type = "Task" [ 1611.477723] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.485353] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097167, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.703863] env[69927]: INFO nova.compute.manager [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Took 0.55 seconds to detach 1 volumes for instance. [ 1611.705971] env[69927]: DEBUG nova.compute.manager [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8] Deleting volume: cf0f635f-6b34-45ac-933b-368bae0dcbd3 {{(pid=69927) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1611.988275] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097167, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066984} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.988624] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1611.989327] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee940398-a36c-4c7a-b14e-313e67161207 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.008921] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1612.009201] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81a3e83d-04f1-4085-ad85-5d8f21ccd525 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.028068] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1612.028068] env[69927]: value = "task-4097169" [ 1612.028068] env[69927]: _type = "Task" [ 1612.028068] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.035845] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097169, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.246068] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1612.246458] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1612.246601] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1612.271560] env[69927]: INFO nova.scheduler.client.report [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted allocations for instance 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8 [ 1612.538250] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097169, 'name': ReconfigVM_Task, 'duration_secs': 0.272503} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.538563] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Reconfigured VM instance instance-00000080 to attach disk [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1612.539155] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ad97e79-ce94-4bc4-b09f-6d7f4db111fd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.546455] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1612.546455] env[69927]: value = "task-4097170" [ 1612.546455] env[69927]: _type = "Task" [ 1612.546455] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.555097] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097170, 'name': Rename_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.779726] env[69927]: DEBUG oslo_concurrency.lockutils [None req-8aacdeab-677f-4fad-a206-02e327238516 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.808s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1613.057137] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097170, 'name': Rename_Task, 'duration_secs': 0.143509} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.057510] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1613.057650] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddcbd057-344a-423d-82ec-9f60c51a4e5b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.064507] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1613.064507] env[69927]: value = "task-4097171" [ 1613.064507] env[69927]: _type = "Task" [ 1613.064507] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.072299] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097171, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.575623] env[69927]: DEBUG oslo_vmware.api [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097171, 'name': PowerOnVM_Task, 'duration_secs': 0.412442} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.575898] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1613.576127] env[69927]: INFO nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Took 4.79 seconds to spawn the instance on the hypervisor. 
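The spawn sequence for instance ac9403a1-23c9-40ca-adfb-90a926712dc3 above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows oslo.vmware's invoke-then-poll pattern: each vSphere call returns a Task reference, and the session polls it until completion, which is what produces the repeated "Waiting for the task" and "progress is N%" lines. A minimal sketch of that pattern, assuming only the public oslo.vmware API; the vCenter host, credentials, and VM reference below are placeholders, not values taken from this log:

    # Sketch of the invoke-then-poll pattern behind the "Waiting for the
    # task: ... to complete" / "progress is N%" lines above.
    # Host, credentials and vm_ref are placeholders, not values from this log.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org',        # placeholder vCenter host
        'svc-user', 'secret',         # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)       # how often the task is re-polled

    vm_ref = ...  # managed object reference of the VM, obtained elsewhere

    # Invoke a vSphere method that returns a Task moref ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... then block until it reaches the 'success' state; oslo.vmware
    # logs the intermediate "progress is N%" DEBUG messages while polling.
    session.wait_for_task(task)

As the oslo_vmware.api wait_for_task/_poll_task lines show, every task in this stretch of the log (task-4097163 through task-4097171) is completed through this same polling loop.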
[ 1613.576303] env[69927]: DEBUG nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1613.577060] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0ad03f-4c1f-4e6a-aeda-e4cc9c330ff0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.762620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "eb05a071-88d4-4e51-936f-d5b7554ac204" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1613.762620] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1613.762844] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "eb05a071-88d4-4e51-936f-d5b7554ac204-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1613.762923] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1613.763087] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1613.765756] env[69927]: INFO nova.compute.manager [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Terminating instance [ 1614.093509] env[69927]: INFO nova.compute.manager [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Took 9.47 seconds to build instance. 
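The "Acquiring lock ... by ..." / "acquired ... waited 0.000s" / "released ... held N.NNNs" triplets above and below come from oslo.concurrency's lockutils, which nova uses to serialize work per instance UUID and per shared resource (for example "compute_resources"). A minimal sketch of the same pattern, assuming only oslo.concurrency; the lock name and the wrapped function are illustrative, not copied from nova:

    # Illustrative only: the decorated function is made up, but the
    # acquire/held/release DEBUG lines it produces match the log above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('eb05a071-88d4-4e51-936f-d5b7554ac204')
    def do_terminate_instance():
        # Runs with the per-instance lock held; lockutils logs
        # "Acquiring lock", "acquired ... waited" and "released ... held"
        # on entry to and exit from this function.
        pass

    do_terminate_instance()

The same helper is also available as a context manager, lockutils.lock(name), which is the form behind the "Acquiring lock" / "Acquired lock" / "Releasing lock" entries around the [datastore2] devstack-image-cache_base items earlier in this section.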
[ 1614.268752] env[69927]: DEBUG nova.compute.manager [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1614.269036] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1614.270276] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b61911-4e32-49df-b57d-64253ca4d6ad {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.278615] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1614.278897] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcd13d7b-1b13-492a-8b16-f495f223c80f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.285978] env[69927]: DEBUG oslo_vmware.api [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1614.285978] env[69927]: value = "task-4097172" [ 1614.285978] env[69927]: _type = "Task" [ 1614.285978] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.294699] env[69927]: DEBUG oslo_vmware.api [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097172, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.595369] env[69927]: DEBUG oslo_concurrency.lockutils [None req-84d24217-eaed-4b2b-85ba-d8055368d206 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "ac9403a1-23c9-40ca-adfb-90a926712dc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.985s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1614.636150] env[69927]: INFO nova.compute.manager [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Rebuilding instance [ 1614.676565] env[69927]: DEBUG nova.compute.manager [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1614.677430] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4eb879a-aec6-4dd7-ac89-c0b7418a529a {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.796116] env[69927]: DEBUG oslo_vmware.api [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097172, 'name': PowerOffVM_Task, 'duration_secs': 0.198132} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.796394] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1614.796565] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1614.796824] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-342bbf3c-1eea-40dd-bcd3-09b989e0ff8e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.871572] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1614.871841] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1614.871976] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleting the datastore file [datastore1] eb05a071-88d4-4e51-936f-d5b7554ac204 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1614.872275] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e35dd4c-ce98-41d1-a49a-eb98052a3d44 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.880488] env[69927]: DEBUG oslo_vmware.api [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1614.880488] env[69927]: value = "task-4097174" [ 1614.880488] env[69927]: _type = "Task" [ 1614.880488] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.888844] env[69927]: DEBUG oslo_vmware.api [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.391916] env[69927]: DEBUG oslo_vmware.api [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136498} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.391916] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1615.391916] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1615.392403] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1615.392403] env[69927]: INFO nova.compute.manager [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Took 1.12 seconds to destroy the instance on the hypervisor. 
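After the hypervisor-side destroy of eb05a071-88d4-4e51-936f-d5b7554ac204, network deallocation is handed off to an oslo.service looping call; the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entries (below, and earlier for 5a24c7b2-5674-4fa7-9cc8-0e2a08d53cd8) are that call's wait. A minimal sketch of the looping-call mechanism; the exact variant nova uses here is not visible in this log, so FixedIntervalLoopingCall is shown purely as an illustration:

    # Sketch of an oslo.service looping call: the wrapped function is
    # re-invoked on a fixed interval until it raises LoopingCallDone, and
    # start().wait() blocks the caller until then.
    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate():
        attempts['count'] += 1
        if attempts['count'] < 3:   # pretend the first attempts must retry
            return                  # returning None means "loop again"
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate)
    succeeded = timer.start(interval=0.5).wait()  # True once the loop finishes

Once the wrapped function returns, the compute manager logs the elapsed time, as the "Took 1.32 seconds to deallocate network for instance" entry further below shows.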
[ 1615.392469] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1615.392660] env[69927]: DEBUG nova.compute.manager [-] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1615.392753] env[69927]: DEBUG nova.network.neutron [-] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1615.689729] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1615.690093] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44b5bbec-9414-4d3e-b6dc-56d01d3c8da7 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.699591] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1615.699591] env[69927]: value = "task-4097175" [ 1615.699591] env[69927]: _type = "Task" [ 1615.699591] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.711429] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097175, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.723967] env[69927]: DEBUG nova.compute.manager [req-210922d2-ad59-423b-a6ae-e9f678069ab7 req-761fbe0f-b8f9-4f0e-814a-536bea4e5aad service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Received event network-vif-deleted-961d8267-5b82-4297-96a8-4806c2f9d8a5 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1615.724179] env[69927]: INFO nova.compute.manager [req-210922d2-ad59-423b-a6ae-e9f678069ab7 req-761fbe0f-b8f9-4f0e-814a-536bea4e5aad service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Neutron deleted interface 961d8267-5b82-4297-96a8-4806c2f9d8a5; detaching it from the instance and deleting it from the info cache [ 1615.724356] env[69927]: DEBUG nova.network.neutron [req-210922d2-ad59-423b-a6ae-e9f678069ab7 req-761fbe0f-b8f9-4f0e-814a-536bea4e5aad service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.209654] env[69927]: DEBUG nova.network.neutron [-] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.210917] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097175, 'name': PowerOffVM_Task, 'duration_secs': 0.13027} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.211393] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1616.212030] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1616.212818] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580acb38-da74-4cfd-be21-af97db3977cd {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.221129] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1616.221370] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22769cc9-f30c-4f72-9ff4-4ef5bf978a2b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.229248] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6814b381-1734-45f3-a0b9-3b7d4a6b9ccd {{(pid=69927) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.240479] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef3f279-b242-448f-a958-1a2a89d3eeb1 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.253218] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1616.253451] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1616.253635] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Deleting the datastore file [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1616.254339] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d1af27d-7b81-479c-800c-f24a9091bb95 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.260169] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1616.260169] env[69927]: value = "task-4097177" [ 1616.260169] env[69927]: _type = "Task" [ 1616.260169] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.268224] env[69927]: DEBUG nova.compute.manager [req-210922d2-ad59-423b-a6ae-e9f678069ab7 req-761fbe0f-b8f9-4f0e-814a-536bea4e5aad service nova] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Detach interface failed, port_id=961d8267-5b82-4297-96a8-4806c2f9d8a5, reason: Instance eb05a071-88d4-4e51-936f-d5b7554ac204 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1616.273128] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097177, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.712853] env[69927]: INFO nova.compute.manager [-] [instance: eb05a071-88d4-4e51-936f-d5b7554ac204] Took 1.32 seconds to deallocate network for instance. [ 1616.769964] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086979} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.770240] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.770425] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1616.770604] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1617.219773] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1617.220080] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1617.220307] env[69927]: DEBUG nova.objects.instance [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'resources' on Instance uuid eb05a071-88d4-4e51-936f-d5b7554ac204 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1617.775938] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487cba1d-2034-4f9a-b9b0-9e1a362e2804 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.786036] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54bdbcd0-51d1-42f9-a89b-54722121e835 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.819790] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e835e6-6051-4bdc-baff-70cd37c53c3c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.828861] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-13T19:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-13T19:34:38Z,direct_url=,disk_format='vmdk',id=f524494e-9179-4b3e-a3e2-782f019def24,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cf0555225f1046838a534888181ecd96',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-13T19:34:39Z,virtual_size=,visibility=), allow threads: False {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1617.829123] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Flavor limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1617.829282] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Image limits 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1617.829518] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Flavor pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1617.829613] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Image pref 0:0:0 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1617.829754] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69927) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1617.829958] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1617.830133] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1617.830300] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Got 1 possible topologies {{(pid=69927) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1617.830465] env[69927]: DEBUG nova.virt.hardware [None 
req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1617.830646] env[69927]: DEBUG nova.virt.hardware [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69927) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1617.831908] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae43867-a162-4c9f-b951-b0bd1f85008c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.836196] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7224d4-7da8-4930-9576-b2c58ed57082 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.850613] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44b65d1-2098-40a1-bc02-92120ea52064 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.854478] env[69927]: DEBUG nova.compute.provider_tree [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.865262] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Instance VIF info [] {{(pid=69927) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1617.870772] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1617.871538] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Creating VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1617.871740] env[69927]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75e738d9-d9e6-4750-870f-e7f8138c09d6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.888825] env[69927]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1617.888825] env[69927]: value = "task-4097178" [ 1617.888825] env[69927]: _type = "Task" [ 1617.888825] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.896748] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097178, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.357855] env[69927]: DEBUG nova.scheduler.client.report [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1618.398832] env[69927]: DEBUG oslo_vmware.api [-] Task: {'id': task-4097178, 'name': CreateVM_Task, 'duration_secs': 0.241865} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.399059] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Created VM on the ESX host {{(pid=69927) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1618.399417] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.399579] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1618.399906] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1618.400198] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fda865bd-2e6e-44d0-8737-fdca4a1942c8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.404652] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1618.404652] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5271a1c7-e886-671c-e315-b0293cd1b3bc" [ 1618.404652] env[69927]: _type = "Task" [ 1618.404652] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.414044] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5271a1c7-e886-671c-e315-b0293cd1b3bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.863452] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.643s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1618.884341] env[69927]: INFO nova.scheduler.client.report [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted allocations for instance eb05a071-88d4-4e51-936f-d5b7554ac204 [ 1618.915604] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5271a1c7-e886-671c-e315-b0293cd1b3bc, 'name': SearchDatastore_Task, 'duration_secs': 0.009966} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.915848] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1618.916093] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Processing image f524494e-9179-4b3e-a3e2-782f019def24 {{(pid=69927) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1618.916329] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.916478] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1618.916656] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 
tempest-ServersListShow296Test-510226241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1618.916915] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69ba5af9-0eba-4ee9-8b25-07d9ef48fdd6 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.925673] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69927) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1618.925857] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69927) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1618.927186] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb43c3ea-4867-461a-aa0c-404e0572baa5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.933838] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1618.933838] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5225c9d9-75e2-c6b1-213e-82574d9b6c2e" [ 1618.933838] env[69927]: _type = "Task" [ 1618.933838] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.941804] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5225c9d9-75e2-c6b1-213e-82574d9b6c2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.392190] env[69927]: DEBUG oslo_concurrency.lockutils [None req-2b4f256a-00e0-4cff-b559-fee4531fce70 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "eb05a071-88d4-4e51-936f-d5b7554ac204" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.629s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1619.444099] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]5225c9d9-75e2-c6b1-213e-82574d9b6c2e, 'name': SearchDatastore_Task, 'duration_secs': 0.009118} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.444874] env[69927]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-178a368f-b9f0-4d2c-8044-4b7185afa635 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.450473] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1619.450473] env[69927]: value = "session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52337077-d626-90cd-5543-ea892af86234" [ 1619.450473] env[69927]: _type = "Task" [ 1619.450473] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.458969] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52337077-d626-90cd-5543-ea892af86234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.961643] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': session[529733e4-c73a-2d3b-a8c3-e9c6e708966b]52337077-d626-90cd-5543-ea892af86234, 'name': SearchDatastore_Task, 'duration_secs': 0.010575} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.962106] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1619.962149] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1619.962447] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f373ec0-96d8-4ae9-b72b-a883ee59e2f0 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.969384] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1619.969384] env[69927]: value = "task-4097180" [ 1619.969384] env[69927]: _type = "Task" [ 1619.969384] env[69927]: } to complete. 
{{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.977282] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.478380] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097180, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489225} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.478639] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f524494e-9179-4b3e-a3e2-782f019def24/f524494e-9179-4b3e-a3e2-782f019def24.vmdk to [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk {{(pid=69927) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1620.478854] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Extending root virtual disk to 1048576 {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1620.479113] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3aa1293-7d01-409e-81ca-618a881a627b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.486280] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1620.486280] env[69927]: value = "task-4097181" [ 1620.486280] env[69927]: _type = "Task" [ 1620.486280] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.493818] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097181, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.996650] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066876} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.997046] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Extended root virtual disk {{(pid=69927) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1620.997799] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a707ce68-d14a-4c7f-b320-4e6be9a3e934 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.017655] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1621.017918] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e3343a5-71da-4aee-b7f0-b2bd366d737e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.038449] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1621.038449] env[69927]: value = "task-4097182" [ 1621.038449] env[69927]: _type = "Task" [ 1621.038449] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.047102] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097182, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.506828] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1621.549613] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097182, 'name': ReconfigVM_Task, 'duration_secs': 0.348703} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.549894] env[69927]: DEBUG nova.virt.vmwareapi.volumeops [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Reconfigured VM instance instance-00000080 to attach disk [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3/ac9403a1-23c9-40ca-adfb-90a926712dc3.vmdk or device None with type sparse {{(pid=69927) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1621.550496] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f673cde-3afd-494d-8087-16ca72d58191 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.556610] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1621.556610] env[69927]: value = "task-4097183" [ 1621.556610] env[69927]: _type = "Task" [ 1621.556610] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.563984] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097183, 'name': Rename_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.067609] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097183, 'name': Rename_Task, 'duration_secs': 0.135701} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.067944] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powering on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1622.068152] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7cf8d4f-65b6-4fef-92e6-a31c9c2c4e9b {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.073937] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1622.073937] env[69927]: value = "task-4097184" [ 1622.073937] env[69927]: _type = "Task" [ 1622.073937] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.081757] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097184, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.584199] env[69927]: DEBUG oslo_vmware.api [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097184, 'name': PowerOnVM_Task, 'duration_secs': 0.408849} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.584505] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powered on the VM {{(pid=69927) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1622.584742] env[69927]: DEBUG nova.compute.manager [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Checking state {{(pid=69927) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1622.585522] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071b5f8c-8956-42f6-8151-4d949b7c2af5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.103094] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1623.103427] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1623.103648] env[69927]: DEBUG nova.objects.instance [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69927) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1623.356481] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "ac9403a1-23c9-40ca-adfb-90a926712dc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1623.356710] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "ac9403a1-23c9-40ca-adfb-90a926712dc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1623.356904] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "ac9403a1-23c9-40ca-adfb-90a926712dc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1623.357106] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "ac9403a1-23c9-40ca-adfb-90a926712dc3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1623.357283] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "ac9403a1-23c9-40ca-adfb-90a926712dc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1623.359484] env[69927]: INFO nova.compute.manager [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Terminating instance [ 1623.426342] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1623.426610] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1623.426820] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1623.427026] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1623.427190] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1623.429318] env[69927]: INFO nova.compute.manager [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Terminating instance [ 1623.863404] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "refresh_cache-ac9403a1-23c9-40ca-adfb-90a926712dc3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.863709] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquired lock "refresh_cache-ac9403a1-23c9-40ca-adfb-90a926712dc3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1623.863928] env[69927]: DEBUG nova.network.neutron [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Building network info cache for instance {{(pid=69927) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1623.933146] env[69927]: DEBUG nova.compute.manager [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Start destroying the instance on the hypervisor. 
{{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1623.933433] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1623.934342] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6760aff5-48d2-4c3f-9ba6-8f5dd05c5c32 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.944042] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1623.944288] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6c3e6a9-56c1-43ab-9215-7836831d6b9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.950735] env[69927]: DEBUG oslo_vmware.api [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1623.950735] env[69927]: value = "task-4097185" [ 1623.950735] env[69927]: _type = "Task" [ 1623.950735] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.958890] env[69927]: DEBUG oslo_vmware.api [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.114146] env[69927]: DEBUG oslo_concurrency.lockutils [None req-497a6130-0f40-46cb-ba3f-d7751f299f53 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1624.384067] env[69927]: DEBUG nova.network.neutron [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Instance cache missing network info. 
{{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1624.432149] env[69927]: DEBUG nova.network.neutron [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.460418] env[69927]: DEBUG oslo_vmware.api [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097185, 'name': PowerOffVM_Task, 'duration_secs': 0.236136} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.460676] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1624.460841] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1624.461114] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-059ab980-62e2-49d0-84b4-b0c398622639 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.530760] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1624.530943] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Deleting contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1624.531106] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleting the datastore file [datastore1] ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1624.531379] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c4540e7-6036-4e7d-a2fd-f3772a7e657c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.538209] env[69927]: DEBUG oslo_vmware.api [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for the task: (returnval){ [ 1624.538209] env[69927]: value = "task-4097187" [ 1624.538209] 
env[69927]: _type = "Task" [ 1624.538209] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.546517] env[69927]: DEBUG oslo_vmware.api [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097187, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.935204] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Releasing lock "refresh_cache-ac9403a1-23c9-40ca-adfb-90a926712dc3" {{(pid=69927) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1624.935643] env[69927]: DEBUG nova.compute.manager [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Start destroying the instance on the hypervisor. {{(pid=69927) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1624.935847] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Destroying instance {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.936748] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161efe98-3d31-471a-a466-df34a04e04f8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.944499] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powering off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.944757] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b64e3b87-57dc-451a-9c59-c088f81f61ee {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.951133] env[69927]: DEBUG oslo_vmware.api [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1624.951133] env[69927]: value = "task-4097188" [ 1624.951133] env[69927]: _type = "Task" [ 1624.951133] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.959348] env[69927]: DEBUG oslo_vmware.api [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097188, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.048250] env[69927]: DEBUG oslo_vmware.api [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Task: {'id': task-4097187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128555} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.048679] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.048932] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Deleted contents of the VM from datastore datastore1 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.049160] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.049343] env[69927]: INFO nova.compute.manager [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1625.049588] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1625.049774] env[69927]: DEBUG nova.compute.manager [-] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1625.049861] env[69927]: DEBUG nova.network.neutron [-] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1625.311927] env[69927]: DEBUG nova.compute.manager [req-4994487e-9133-4615-ad8c-734d596c23b7 req-59205407-714f-4645-be5e-04b1eab4c5f8 service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Received event network-vif-deleted-cefb857a-0190-4924-a605-749a4858cef2 {{(pid=69927) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1625.312977] env[69927]: INFO nova.compute.manager [req-4994487e-9133-4615-ad8c-734d596c23b7 req-59205407-714f-4645-be5e-04b1eab4c5f8 service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Neutron deleted interface cefb857a-0190-4924-a605-749a4858cef2; detaching it from the instance and deleting it from the info cache [ 1625.312977] env[69927]: DEBUG nova.network.neutron [req-4994487e-9133-4615-ad8c-734d596c23b7 req-59205407-714f-4645-be5e-04b1eab4c5f8 service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.462326] env[69927]: DEBUG oslo_vmware.api [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097188, 'name': PowerOffVM_Task, 'duration_secs': 0.111989} completed successfully. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.462693] env[69927]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Powered off the VM {{(pid=69927) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1625.462994] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Unregistering the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1625.463409] env[69927]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7a10920-a066-4b00-9d6c-31b4851945c3 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.488940] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Unregistered the VM {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1625.489184] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Deleting contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1625.489342] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Deleting the datastore file [datastore2] ac9403a1-23c9-40ca-adfb-90a926712dc3 {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1625.489608] env[69927]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd78f3ef-f633-4bf9-8768-50787e948f05 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.496666] env[69927]: DEBUG oslo_vmware.api [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for the task: (returnval){ [ 1625.496666] env[69927]: value = "task-4097190" [ 1625.496666] env[69927]: _type = "Task" [ 1625.496666] env[69927]: } to complete. {{(pid=69927) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.505355] env[69927]: DEBUG oslo_vmware.api [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097190, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.798059] env[69927]: DEBUG nova.network.neutron [-] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.815908] env[69927]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99b9826c-7a12-487b-a420-48989fdc23d8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.831159] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fdc813-cba9-40ef-8ac1-c855b12c8ca5 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.869820] env[69927]: DEBUG nova.compute.manager [req-4994487e-9133-4615-ad8c-734d596c23b7 req-59205407-714f-4645-be5e-04b1eab4c5f8 service nova] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Detach interface failed, port_id=cefb857a-0190-4924-a605-749a4858cef2, reason: Instance ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 could not be found. {{(pid=69927) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1626.009910] env[69927]: DEBUG oslo_vmware.api [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Task: {'id': task-4097190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108011} completed successfully. {{(pid=69927) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.010288] env[69927]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Deleted the datastore file {{(pid=69927) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1626.010557] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Deleted contents of the VM from datastore datastore2 {{(pid=69927) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1626.010823] env[69927]: DEBUG nova.virt.vmwareapi.vmops [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Instance destroyed {{(pid=69927) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1626.011086] env[69927]: INFO nova.compute.manager [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1626.011727] env[69927]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69927) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1626.011727] env[69927]: DEBUG nova.compute.manager [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Deallocating network for instance {{(pid=69927) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1626.011923] env[69927]: DEBUG nova.network.neutron [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] deallocate_for_instance() {{(pid=69927) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1626.028671] env[69927]: DEBUG nova.network.neutron [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Instance cache missing network info. {{(pid=69927) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1626.300385] env[69927]: INFO nova.compute.manager [-] [instance: ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6] Took 1.25 seconds to deallocate network for instance. [ 1626.531463] env[69927]: DEBUG nova.network.neutron [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Updating instance_info_cache with network_info: [] {{(pid=69927) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.806808] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1626.807034] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1626.807261] env[69927]: DEBUG nova.objects.instance [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lazy-loading 'resources' on Instance uuid ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1627.034084] env[69927]: INFO nova.compute.manager [-] [instance: ac9403a1-23c9-40ca-adfb-90a926712dc3] Took 1.02 seconds to deallocate network for instance. 
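Editor's note: the records above trace the vmwareapi destroy path for instance ac9403a1-23c9-40ca-adfb-90a926712dc3: the VM is powered off and unregistered, its backing files are removed with FileManager.DeleteDatastoreFile_Task, and oslo.vmware polls the returned task (task-4097190) until it reports completion, after which network deallocation starts. Below is a minimal sketch of that same invoke-task / wait-for-task pattern driven through oslo.vmware directly; the vCenter host, credentials, vm_ref, dc_ref and datastore path are placeholders, and this is an illustration of the pattern, not Nova's actual vmops/ds_util code.

    # Illustrative sketch only: drives the same vSphere calls the log shows
    # (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) through
    # oslo.vmware's session and wait_for_task helpers. Host, credentials,
    # vm_ref, dc_ref and ds_path are all placeholders.
    from oslo_vmware import api


    def destroy_vm_files(vm_ref, dc_ref, ds_path):
        session = api.VMwareAPISession(
            'vc.example.test',                   # placeholder vCenter host
            'administrator@vsphere.local',       # placeholder credentials
            'secret',
            api_retry_count=10,
            task_poll_interval=0.5)

        # Power off, then unregister the VM (UnregisterVM is not a task).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory from the datastore and poll the task,
        # mirroring the DeleteDatastoreFile_Task / wait_for_task records above.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)

The wait_for_task call is what produces the "Waiting for the task ... to complete" and "Task: {...} progress is 0%" / "completed successfully" records seen above.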
[ 1627.358219] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71978421-fcbe-4915-97e7-289570f1ec4c {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.366134] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8ffcad-9c85-4ac8-9738-76c94f5d5258 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.396754] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e19a562-c3b9-4671-a768-bbab7d097f9f {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.404534] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3517b690-224f-44ca-9976-30cc91f2f4a4 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.419444] env[69927]: DEBUG nova.compute.provider_tree [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.540574] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1627.922481] env[69927]: DEBUG nova.scheduler.client.report [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1628.427886] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1628.430636] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.890s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1628.430636] env[69927]: DEBUG nova.objects.instance [None 
req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lazy-loading 'resources' on Instance uuid ac9403a1-23c9-40ca-adfb-90a926712dc3 {{(pid=69927) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1628.446381] env[69927]: INFO nova.scheduler.client.report [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Deleted allocations for instance ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6 [ 1628.954942] env[69927]: DEBUG oslo_concurrency.lockutils [None req-d130057b-1419-4c86-acfc-b23f84eb6e25 tempest-ServerActionsTestOtherA-296658517 tempest-ServerActionsTestOtherA-296658517-project-member] Lock "ccfa1bf0-fdca-41ff-8003-f671f6f6f6d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.528s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1628.963669] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86f0ffb-e396-4c6c-8a72-b303b206ed91 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.973008] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0540467-917a-4c7b-b100-6eed3c1e12a8 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.005069] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20ffcb4-83e0-4a67-bc95-7d20536dee02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.012908] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24785af6-8b88-46ec-b511-4f6df4174ebe {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.027376] env[69927]: DEBUG nova.compute.provider_tree [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.507207] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.507408] env[69927]: DEBUG nova.compute.manager [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69927) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1629.530693] env[69927]: DEBUG nova.scheduler.client.report [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1630.036024] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1630.054667] env[69927]: INFO nova.scheduler.client.report [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Deleted allocations for instance ac9403a1-23c9-40ca-adfb-90a926712dc3 [ 1630.508293] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.563401] env[69927]: DEBUG oslo_concurrency.lockutils [None req-dc018a18-1437-49d6-b84c-1edbe243aae8 tempest-ServersListShow296Test-510226241 tempest-ServersListShow296Test-510226241-project-member] Lock "ac9403a1-23c9-40ca-adfb-90a926712dc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.207s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1631.509172] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.509576] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.509732] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.504869] env[69927]: DEBUG oslo_service.periodic_task [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.507538] env[69927]: DEBUG oslo_service.periodic_task 
[None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Running periodic task ComputeManager.update_available_resource {{(pid=69927) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1633.010928] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1633.011331] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1633.011384] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1633.011521] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69927) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1633.012398] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b929578-86f9-480f-b26f-4873016c3a27 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.020970] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed691ebb-d045-4fe0-b9ae-709471cd93df {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.035055] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec3d916-2e7f-4248-9dfa-6e3f46f95e02 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.041752] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a968542-50b7-4dcb-bf7e-bd88e9047e5e {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.070041] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180162MB free_disk=17GB free_vcpus=48 pci_devices=None {{(pid=69927) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1633.070182] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1633.070400] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" acquired 
by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69927) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1634.092053] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1634.092443] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69927) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1634.105994] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f4618e-89d2-495c-be06-fcbe921b845d {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.113701] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708e196f-2cff-42cd-90ba-c4d597e8e957 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.143981] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac62af96-d27e-4244-8d81-43ec85a26092 {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.151516] env[69927]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b13d5f-d1c1-4651-a6b0-4030529a4dfa {{(pid=69927) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.164806] env[69927]: DEBUG nova.compute.provider_tree [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed in ProviderTree for provider: 2f529b36-df5f-4b37-8103-68f74f737726 {{(pid=69927) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.668087] env[69927]: DEBUG nova.scheduler.client.report [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Inventory has not changed for provider 2f529b36-df5f-4b37-8103-68f74f737726 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 17, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69927) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1635.173119] env[69927]: DEBUG nova.compute.resource_tracker [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69927) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1635.173421] env[69927]: DEBUG oslo_concurrency.lockutils [None req-5c5889eb-d518-4786-b969-75fdede9376e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.103s {{(pid=69927) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
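Editor's note: the tail of the log is the steady-state loop: an oslo_service periodic task fires ComputeManager.update_available_resource, the resource tracker serializes its audit behind the "compute_resources" lock (the "Acquiring lock ... / acquired ... / released ..." lines come from lockutils' inner wrapper), and the resulting view is reported to placement, which answers that the inventory for provider 2f529b36-df5f-4b37-8103-68f74f737726 is unchanged. The snippet below is a minimal, self-contained sketch of those two oslo primitives, not Nova's implementation; the class names, the 60-second spacing and the returned values are invented for illustration.

    # Illustrative sketch only: a periodic task that triggers a resource audit,
    # and a lockutils-synchronized section that produces acquire/wait/hold
    # log lines like the ones above. All names and values are placeholders.
    from oslo_concurrency import lockutils
    from oslo_config import cfg
    from oslo_service import periodic_task


    class FakeResourceTracker(object):
        """Stands in for the resource tracker; only the locking shape matters."""

        @lockutils.synchronized('compute_resources')
        def update_available_resource(self, nodename):
            # lockutils' wrapper logs the acquire/waited/held messages around
            # this body, matching the inner/lockutils.py records in the log.
            return {'node': nodename, 'free_vcpus': 48, 'free_ram_mb': 180162}


    class FakeComputeManager(periodic_task.PeriodicTasks):
        """Runs the audit on a periodic-task schedule."""

        def __init__(self, conf):
            super(FakeComputeManager, self).__init__(conf)
            self.tracker = FakeResourceTracker()

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def update_available_resource(self, context):
            # Corresponds to "Running periodic task
            # ComputeManager.update_available_resource" above.
            self.tracker.update_available_resource('domain-c8')


    if __name__ == '__main__':
        mgr = FakeComputeManager(cfg.CONF)
        # One tick of the periodic loop; a real service calls this on a timer.
        mgr.run_periodic_tasks(context=None)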